Mirror of https://github.com/home-assistant/core.git (synced 2026-01-12 10:08:19 +00:00)

Compare commits: knx-expose...master (88 commits)
SHA1:

49086b2a76, 1f28fe9933, 4465aa264c, 2c1bc96161, 7127159a5b, 9f0eb6f077, da19cc06e3, fd92377cf2,
c201938b8b, b3765204b1, 786257e051, 9559634151, cf12ed8f08, e213f49c75, 09c7cc113a, e1e7e039a9,
05a0f0d23f, d3853019eb, ccbaac55b3, 771292ced9, 5d4262e8b3, d96da9a639, 288a805d0f, 8e55ceea77,
14f1d9fbad, eb6582bc24, 4afe67f33d, 5d7b10f569, 340c2e48df, 86257b1865, eea1adccfd, 242be14f88,
7e013b723d, 4d55939f53, e5e7546d49, e560795d04, 15b0342bd7, 8d05a5f3d4, 358ad29b59, 5c4f99b828,
b3f123c715, 85c2351af2, ec19529c99, d5ebd02afe, 37d82ab795, 5d08481137, 0861b7541d, abf7078842,
c4012fae4e, d6082ab6c3, 77367e415f, 6c006c68c1, 026fdeb4ce, 1034218e6e, a21062f502, 2e157f1bc6,
a697e63b8c, d28d55c7db, 8863488286, 53cfdef1ac, 42ea7ecbd6, d58d08c350, 65a259b9df, cbfbfbee13,
e503b37ddc, 217eef39f3, dcdbce9b21, 71db8fe185, 9b96cb66d5, 78bccbbbc2, b0a8f9575c, 61104a9970,
8d13dbdd0c, 9afb41004e, cdd542f6e6, f520686002, e4d09bb615, 10f6ccf6cc, d9fa67b16f, cf228ae02b,
cb4d62ab9a, d2f75aec04, a609fbc07b, 1b9c7ae0ac, 492f2117fb, 2346f83635, 8925bfb182, 8f2b1f0eff
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -40,7 +40,7 @@ env:
  CACHE_VERSION: 2
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2026.2"
  HA_SHORT_VERSION: "2026.1"
  DEFAULT_PYTHON: "3.13.11"
  ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
  # 10.3 is the oldest supported version
CODEOWNERS (generated, 3 changes)

@@ -1170,8 +1170,6 @@ build.json @home-assistant/supervisor
/tests/components/open_router/ @joostlek
/homeassistant/components/openerz/ @misialq
/tests/components/openerz/ @misialq
/homeassistant/components/openevse/ @c00w @firstof9
/tests/components/openevse/ @c00w @firstof9
/homeassistant/components/openexchangerates/ @MartinHjelmare
/tests/components/openexchangerates/ @MartinHjelmare
/homeassistant/components/opengarage/ @danielhiversen

@@ -1803,7 +1801,6 @@ build.json @home-assistant/supervisor
/tests/components/waqi/ @joostlek
/homeassistant/components/water_heater/ @home-assistant/core
/tests/components/water_heater/ @home-assistant/core
/homeassistant/components/waterfurnace/ @sdague @masterkoppa
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
@@ -7,12 +7,7 @@ from homeassistant.core import HomeAssistant
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator

PLATFORMS: list[Platform] = [
    Platform.BUTTON,
    Platform.CLIMATE,
    Platform.NUMBER,
    Platform.SENSOR,
]
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
@@ -1,89 +0,0 @@
"""Button platform for Airobot integration."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from pyairobotrest.exceptions import (
    AirobotConnectionError,
    AirobotError,
    AirobotTimeoutError,
)

from homeassistant.components.button import (
    ButtonDeviceClass,
    ButtonEntity,
    ButtonEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
from .entity import AirobotEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirobotButtonEntityDescription(ButtonEntityDescription):
    """Describes Airobot button entity."""

    press_fn: Callable[[AirobotDataUpdateCoordinator], Coroutine[Any, Any, None]]


BUTTON_TYPES: tuple[AirobotButtonEntityDescription, ...] = (
    AirobotButtonEntityDescription(
        key="restart",
        device_class=ButtonDeviceClass.RESTART,
        entity_category=EntityCategory.CONFIG,
        press_fn=lambda coordinator: coordinator.client.reboot_thermostat(),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AirobotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Airobot button entities."""
    coordinator = entry.runtime_data

    async_add_entities(
        AirobotButton(coordinator, description) for description in BUTTON_TYPES
    )


class AirobotButton(AirobotEntity, ButtonEntity):
    """Representation of an Airobot button."""

    entity_description: AirobotButtonEntityDescription

    def __init__(
        self,
        coordinator: AirobotDataUpdateCoordinator,
        description: AirobotButtonEntityDescription,
    ) -> None:
        """Initialize the button."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"

    async def async_press(self) -> None:
        """Handle the button press."""
        try:
            await self.entity_description.press_fn(self.coordinator)
        except (AirobotConnectionError, AirobotTimeoutError):
            # Connection errors during reboot are expected as device restarts
            pass
        except AirobotError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="button_press_failed",
                translation_placeholders={"button": self.entity_description.key},
            ) from err
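The button platform above drives each entity through a `press_fn` callback stored on a frozen, keyword-only entity description. A minimal, self-contained sketch of that pattern; the `Coordinator` class and its `reboot()` call are illustrative stand-ins, not the real integration API:

```python
# Sketch of the "press_fn in the entity description" pattern, assuming a
# made-up Coordinator with an async reboot() method.
import asyncio
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any


class Coordinator:
    """Stand-in for AirobotDataUpdateCoordinator."""

    async def reboot(self) -> None:
        print("rebooting device")


@dataclass(frozen=True, kw_only=True)
class ButtonDescription:
    key: str
    press_fn: Callable[[Coordinator], Coroutine[Any, Any, None]]


BUTTONS = (ButtonDescription(key="restart", press_fn=lambda c: c.reboot()),)


async def main() -> None:
    coordinator = Coordinator()
    for description in BUTTONS:
        # The entity's async_press() simply awaits the described callback.
        await description.press_fn(coordinator)


asyncio.run(main())
```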
@@ -86,9 +86,6 @@
    "authentication_failed": {
      "message": "Authentication failed, please reauthenticate."
    },
    "button_press_failed": {
      "message": "Failed to press {button} button."
    },
    "connection_failed": {
      "message": "Failed to communicate with device."
    },
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "requirements": ["airos==0.6.1"]
  "requirements": ["airos==0.6.0"]
}
@@ -140,7 +140,6 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
    "light",
    "lock",
    "media_player",
    "person",
    "scene",
    "siren",
    "switch",
@@ -34,7 +34,7 @@ class BeoData:

type BeoConfigEntry = ConfigEntry[BeoData]

PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER, Platform.SENSOR]
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]


async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
@@ -115,7 +115,6 @@ class WebsocketNotification(StrEnum):
    """Enum for WebSocket notification types."""

    ACTIVE_LISTENING_MODE = "active_listening_mode"
    BATTERY = "battery"
    BEO_REMOTE_BUTTON = "beo_remote_button"
    BUTTON = "button"
    PLAYBACK_ERROR = "playback_error"
@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Any

from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

@@ -56,19 +55,6 @@ async def async_get_config_entry_diagnostics(

    # Get remotes
    for remote in await get_remotes(config_entry.runtime_data.client):
        # Get Battery Sensor states
        if entity_id := entity_registry.async_get_entity_id(
            SENSOR_DOMAIN,
            DOMAIN,
            f"{remote.serial_number}_{config_entry.unique_id}_remote_battery_level",
        ):
            if state := hass.states.get(entity_id):
                state_dict = dict(state.as_dict())

                # Remove context as it is not relevant
                state_dict.pop("context")
                data[f"remote_{remote.serial_number}_battery_level"] = state_dict

        # Get key Event entity states (if enabled)
        for key_type in get_remote_keys():
            if entity_id := entity_registry.async_get_entity_id(

@@ -86,15 +72,4 @@ async def async_get_config_entry_diagnostics(
        # Add remote Mozart model
        data[f"remote_{remote.serial_number}"] = dict(remote)

    # Get Mozart battery entity
    if entity_id := entity_registry.async_get_entity_id(
        SENSOR_DOMAIN, DOMAIN, f"{config_entry.unique_id}_battery_level"
    ):
        if state := hass.states.get(entity_id):
            state_dict = dict(state.as_dict())

            # Remove context as it is not relevant
            state_dict.pop("context")
            data["battery_level"] = state_dict

    return data
@@ -1,139 +0,0 @@
"""Sensor entities for the Bang & Olufsen integration."""

from __future__ import annotations

import contextlib
from datetime import timedelta

from aiohttp import ClientConnectorError
from mozart_api.exceptions import ApiException
from mozart_api.models import BatteryState, PairedRemote

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BeoConfigEntry
from .const import CONNECTION_STATUS, DOMAIN, WebsocketNotification
from .entity import BeoEntity
from .util import get_remotes, supports_battery

SCAN_INTERVAL = timedelta(minutes=15)

PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BeoConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Sensor entities from config entry."""
    entities: list[BeoSensor] = []

    # Check for Mozart device with battery
    if await supports_battery(config_entry.runtime_data.client):
        entities.append(BeoSensorBatteryLevel(config_entry))

    # Add any Beoremote One remotes
    entities.extend(
        [
            BeoSensorRemoteBatteryLevel(config_entry, remote)
            for remote in (await get_remotes(config_entry.runtime_data.client))
        ]
    )

    async_add_entities(entities, update_before_add=True)


class BeoSensor(SensorEntity, BeoEntity):
    """Base Bang & Olufsen Sensor."""

    def __init__(self, config_entry: BeoConfigEntry) -> None:
        """Initialize Sensor."""
        super().__init__(config_entry, config_entry.runtime_data.client)


class BeoSensorBatteryLevel(BeoSensor):
    """Battery level Sensor for Mozart devices."""

    _attr_device_class = SensorDeviceClass.BATTERY
    _attr_native_unit_of_measurement = PERCENTAGE
    _attr_state_class = SensorStateClass.MEASUREMENT

    def __init__(self, config_entry: BeoConfigEntry) -> None:
        """Init the battery level Sensor."""
        super().__init__(config_entry)

        self._attr_unique_id = f"{self._unique_id}_battery_level"

    async def async_added_to_hass(self) -> None:
        """Turn on the dispatchers."""
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
                self._async_update_connection_state,
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BATTERY}",
                self._update_battery,
            )
        )

    async def _update_battery(self, data: BatteryState) -> None:
        """Update sensor value."""
        self._attr_native_value = data.battery_level
        self.async_write_ha_state()


class BeoSensorRemoteBatteryLevel(BeoSensor):
    """Battery level Sensor for the Beoremote One."""

    _attr_device_class = SensorDeviceClass.BATTERY
    _attr_native_unit_of_measurement = PERCENTAGE
    _attr_should_poll = True
    _attr_state_class = SensorStateClass.MEASUREMENT

    def __init__(self, config_entry: BeoConfigEntry, remote: PairedRemote) -> None:
        """Init the battery level Sensor."""
        super().__init__(config_entry)
        # Serial number is not None, as the remote object is provided by get_remotes
        assert remote.serial_number

        self._attr_unique_id = (
            f"{remote.serial_number}_{self._unique_id}_remote_battery_level"
        )
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")}
        )
        self._attr_native_value = remote.battery_level
        self._remote = remote

    async def async_added_to_hass(self) -> None:
        """Turn on the dispatchers."""
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
                self._async_update_connection_state,
            )
        )

    async def async_update(self) -> None:
        """Poll battery status."""
        with contextlib.suppress(ApiException, ClientConnectorError, TimeoutError):
            for remote in await get_remotes(self._client):
                if remote.serial_number == self._remote.serial_number:
                    self._attr_native_value = remote.battery_level
                    break
@@ -84,10 +84,3 @@ def get_remote_keys() -> list[str]:
        for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
    ],
]


async def supports_battery(client: MozartClient) -> bool:
    """Get if a Mozart device has a battery."""
    battery_state = await client.get_battery_state()

    return battery_state.state != "BatteryNotPresent"
@@ -6,7 +6,6 @@ import logging
from typing import TYPE_CHECKING

from mozart_api.models import (
    BatteryState,
    BeoRemoteButton,
    ButtonEvent,
    ListeningModeProps,

@@ -61,7 +60,6 @@ class BeoWebsocket(BeoBase):
        self._client.get_active_listening_mode_notifications(
            self.on_active_listening_mode
        )
        self._client.get_battery_notifications(self.on_battery_notification)
        self._client.get_beo_remote_button_notifications(
            self.on_beo_remote_button_notification
        )

@@ -117,14 +115,6 @@ class BeoWebsocket(BeoBase):
            notification,
        )

    def on_battery_notification(self, notification: BatteryState) -> None:
        """Send battery dispatch."""
        async_dispatcher_send(
            self.hass,
            f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BATTERY}",
            notification,
        )

    def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
        """Send beo_remote_button dispatch."""
        if TYPE_CHECKING:
@@ -22,7 +22,7 @@ from homeassistant.components.media_player import MediaType
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_PIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -56,31 +56,8 @@ def catch_braviatv_errors[_BraviaTVCoordinatorT: BraviaTVCoordinator, **_P](
        """Catch Bravia errors and log message."""
        try:
            await func(self, *args, **kwargs)
        except BraviaNotFound as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="command_error_not_found",
                translation_placeholders={
                    "device": self.config_entry.title,
                },
            ) from err
        except (BraviaConnectionError, BraviaConnectionTimeout, BraviaTurnedOff) as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="command_error_offline",
                translation_placeholders={
                    "device": self.config_entry.title,
                },
            ) from err
        except BraviaError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="command_error",
                translation_placeholders={
                    "device": self.config_entry.title,
                    "error": repr(err),
                },
            ) from err
            _LOGGER.error("Command error: %s", err)
        await self.async_request_refresh()

    return wrapper

@@ -188,35 +165,17 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
            if self.skipped_updates < 10:
                self.connected = False
                self.skipped_updates += 1
                _LOGGER.debug(
                    "Update for %s skipped: the Bravia API service is reloading",
                    self.config_entry.title,
                )
                _LOGGER.debug("Update skipped, Bravia API service is reloading")
                return
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error_not_found",
                translation_placeholders={
                    "device": self.config_entry.title,
                },
            ) from err
            raise UpdateFailed("Error communicating with device") from err
        except (BraviaConnectionError, BraviaConnectionTimeout, BraviaTurnedOff):
            self.is_on = False
            self.connected = False
            _LOGGER.debug(
                "Update for %s skipped: the TV is turned off", self.config_entry.title
            )
            _LOGGER.debug("Update skipped, Bravia TV is off")
        except BraviaError as err:
            self.is_on = False
            self.connected = False
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={
                    "device": self.config_entry.title,
                    "error": repr(err),
                },
            ) from err
            raise UpdateFailed("Error communicating with device") from err

    async def async_update_volume(self) -> None:
        """Update volume information."""
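The `catch_braviatv_errors` hunks above follow a common decorator pattern: library exceptions raised by a wrapped coroutine are caught and re-raised as a single framework error with context. A self-contained sketch of that pattern under the same assumption; the `LibraryError` and `FrameworkError` classes are stand-ins, not the real `BraviaError`/`HomeAssistantError`:

```python
# Minimal sketch of an error-translating decorator for async methods
# (requires Python 3.12+ for the inline ParamSpec syntax).
import asyncio
from collections.abc import Awaitable, Callable
from functools import wraps


class LibraryError(Exception):
    """Stand-in for a client-library exception such as BraviaError."""


class FrameworkError(Exception):
    """Stand-in for the framework-level error such as HomeAssistantError."""


def catch_errors[**P](func: Callable[P, Awaitable[None]]) -> Callable[P, Awaitable[None]]:
    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
        try:
            await func(*args, **kwargs)
        except LibraryError as err:
            # Translate the library error into one framework error type.
            raise FrameworkError(f"command failed: {err!r}") from err

    return wrapper


class Coordinator:
    @catch_errors
    async def send_command(self, command: str) -> None:
        raise LibraryError(f"device rejected {command}")


async def main() -> None:
    try:
        await Coordinator().send_command("power_on")
    except FrameworkError as err:
        print(err)


asyncio.run(main())
```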
@@ -55,22 +55,5 @@
        "name": "Terminate apps"
      }
    }
  },
  "exceptions": {
    "command_error": {
      "message": "Error sending command to {device}: {error}"
    },
    "command_error_not_found": {
      "message": "Error sending command to {device}: the Bravia API service is reloading"
    },
    "command_error_offline": {
      "message": "Error sending command to {device}: the TV is turned off"
    },
    "update_error": {
      "message": "Error updating data for {device}: {error}"
    },
    "update_error_not_found": {
      "message": "Error updating data for {device}: the Bravia API service is stuck"
    }
  }
}
@@ -2,9 +2,6 @@
  "services": {
    "set_hot_water_schedule": {
      "service": "mdi:calendar-clock"
    },
    "sync_time": {
      "service": "mdi:timer-sync-outline"
    }
  }
}
@@ -7,7 +7,7 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["bsblan"],
  "requirements": ["python-bsblan==3.1.6"],
  "requirements": ["python-bsblan==3.1.4"],
  "zeroconf": [
    {
      "name": "bsb-lan*",
@@ -13,7 +13,6 @@ from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.util import dt as dt_util

from .const import DOMAIN

@@ -31,9 +30,8 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
ATTR_SATURDAY_SLOTS = "saturday_slots"
ATTR_SUNDAY_SLOTS = "sunday_slots"

# Service names
# Service name
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
SERVICE_SYNC_TIME = "sync_time"


# Schema for a single time slot

@@ -205,74 +203,6 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
    await entry.runtime_data.slow_coordinator.async_request_refresh()


async def async_sync_time(service_call: ServiceCall) -> None:
    """Synchronize BSB-LAN device time with Home Assistant."""
    device_id: str = service_call.data[ATTR_DEVICE_ID]

    # Get the device and config entry
    device_registry = dr.async_get(service_call.hass)
    device_entry = device_registry.async_get(device_id)

    if device_entry is None:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="invalid_device_id",
            translation_placeholders={"device_id": device_id},
        )

    # Find the config entry for this device
    matching_entries: list[BSBLanConfigEntry] = [
        entry
        for entry in service_call.hass.config_entries.async_entries(DOMAIN)
        if entry.entry_id in device_entry.config_entries
    ]

    if not matching_entries:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="no_config_entry_for_device",
            translation_placeholders={"device_id": device_entry.name or device_id},
        )

    entry = matching_entries[0]

    # Verify the config entry is loaded
    if entry.state is not ConfigEntryState.LOADED:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="config_entry_not_loaded",
            translation_placeholders={"device_name": device_entry.name or device_id},
        )

    client = entry.runtime_data.client

    try:
        # Get current device time
        device_time = await client.time()
        current_time = dt_util.now()
        current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")

        # Only sync if device time differs from HA time
        if device_time.time.value != current_time_str:
            await client.set_time(current_time_str)
    except BSBLANError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="sync_time_failed",
            translation_placeholders={
                "device_name": device_entry.name or device_id,
                "error": str(err),
            },
        ) from err


SYNC_TIME_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_DEVICE_ID): cv.string,
    }
)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register the BSB-Lan services."""

@@ -282,10 +212,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
        set_hot_water_schedule,
        schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
    )

    hass.services.async_register(
        DOMAIN,
        SERVICE_SYNC_TIME,
        async_sync_time,
        schema=SYNC_TIME_SCHEMA,
    )
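For context, once `async_setup_services` has registered `sync_time`, it can be called like any other Home Assistant service. A hedged usage sketch from inside a running Home Assistant instance; the `device_id` value is a placeholder:

```python
# Invoke the bsblan.sync_time service registered above.
from homeassistant.core import HomeAssistant


async def sync_bsblan_clock(hass: HomeAssistant) -> None:
    # blocking=True waits until the service call has finished.
    await hass.services.async_call(
        "bsblan",
        "sync_time",
        {"device_id": "your_bsblan_device_id"},
        blocking=True,
    )
```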
@@ -1,12 +1,3 @@
sync_time:
  fields:
    device_id:
      required: true
      example: "abc123device456"
      selector:
        device:
          integration: bsblan

set_hot_water_schedule:
  fields:
    device_id:
@@ -79,6 +79,9 @@
    "invalid_device_id": {
      "message": "Invalid device ID: {device_id}"
    },
    "invalid_time_format": {
      "message": "Invalid time format provided"
    },
    "no_config_entry_for_device": {
      "message": "No configuration entry found for device: {device_id}"
    },

@@ -105,9 +108,6 @@
    },
    "setup_general_error": {
      "message": "An unknown error occurred while retrieving static device data"
    },
    "sync_time_failed": {
      "message": "Failed to sync time for {device_name}: {error}"
    }
  },
  "services": {

@@ -148,16 +148,6 @@
      }
    },
    "name": "Set hot water schedule"
  },
  "sync_time": {
    "description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
    "fields": {
      "device_id": {
        "description": "The BSB-LAN device to sync time for.",
        "name": "Device"
      }
    },
    "name": "Sync time"
  }
  }
}
@@ -9,7 +9,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["async_upnp_client"],
  "requirements": ["async-upnp-client==0.46.2", "getmac==0.9.5"],
  "requirements": ["async-upnp-client==0.46.1", "getmac==0.9.5"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -8,7 +8,7 @@
  "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["async-upnp-client==0.46.2"],
  "requirements": ["async-upnp-client==0.46.1"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
@@ -1,4 +1,4 @@
"""Duck DNS integration."""
"""Integrate with DuckDNS."""

from __future__ import annotations
@@ -4,6 +4,5 @@
  "codeowners": ["@tr4nt0r"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/duckdns",
  "integration_type": "service",
  "iot_class": "cloud_polling"
}
@@ -2,12 +2,11 @@

from __future__ import annotations

from aiohttp import ClientError
import voluptuous as vol

from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import ConfigEntrySelector

@@ -63,25 +62,9 @@ async def update_domain_service(call: ServiceCall) -> None:

    session = async_get_clientsession(call.hass)

    try:
        if not await update_duckdns(
            session,
            entry.data[CONF_DOMAIN],
            entry.data[CONF_ACCESS_TOKEN],
            txt=call.data.get(ATTR_TXT),
        ):
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="update_failed",
                translation_placeholders={
                    CONF_DOMAIN: entry.data[CONF_DOMAIN],
                },
            )
    except ClientError as e:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="connection_error",
            translation_placeholders={
                CONF_DOMAIN: entry.data[CONF_DOMAIN],
            },
        ) from e
    await update_duckdns(
        session,
        entry.data[CONF_DOMAIN],
        entry.data[CONF_ACCESS_TOKEN],
        txt=call.data.get(ATTR_TXT),
    )
@@ -8,7 +8,7 @@
  "iot_class": "local_polling",
  "loggers": ["eheimdigital"],
  "quality_scale": "platinum",
  "requirements": ["eheimdigital==1.5.0"],
  "requirements": ["eheimdigital==1.4.0"],
  "zeroconf": [
    { "name": "eheimdigital._http._tcp.local.", "type": "_http._tcp.local." }
  ]
@@ -206,7 +206,7 @@ class EnvoyProductionSensorEntityDescription(SensorEntityDescription):
    """Describes an Envoy production sensor entity."""

    value_fn: Callable[[EnvoySystemProduction], int]
    on_phase: str | None = None
    on_phase: str | None


PRODUCTION_SENSORS = (

@@ -219,6 +219,7 @@ PRODUCTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
        suggested_display_precision=3,
        value_fn=attrgetter("watts_now"),
        on_phase=None,
    ),
    EnvoyProductionSensorEntityDescription(
        key="daily_production",

@@ -229,6 +230,7 @@ PRODUCTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=2,
        value_fn=attrgetter("watt_hours_today"),
        on_phase=None,
    ),
    EnvoyProductionSensorEntityDescription(
        key="seven_days_production",

@@ -238,6 +240,7 @@ PRODUCTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=1,
        value_fn=attrgetter("watt_hours_last_7_days"),
        on_phase=None,
    ),
    EnvoyProductionSensorEntityDescription(
        key="lifetime_production",

@@ -248,6 +251,7 @@ PRODUCTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
        suggested_display_precision=3,
        value_fn=attrgetter("watt_hours_lifetime"),
        on_phase=None,
    ),
)

@@ -273,7 +277,7 @@ class EnvoyConsumptionSensorEntityDescription(SensorEntityDescription):
    """Describes an Envoy consumption sensor entity."""

    value_fn: Callable[[EnvoySystemConsumption], int]
    on_phase: str | None = None
    on_phase: str | None


CONSUMPTION_SENSORS = (

@@ -286,6 +290,7 @@ CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
        suggested_display_precision=3,
        value_fn=attrgetter("watts_now"),
        on_phase=None,
    ),
    EnvoyConsumptionSensorEntityDescription(
        key="daily_consumption",

@@ -296,6 +301,7 @@ CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=2,
        value_fn=attrgetter("watt_hours_today"),
        on_phase=None,
    ),
    EnvoyConsumptionSensorEntityDescription(
        key="seven_days_consumption",

@@ -305,6 +311,7 @@ CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=1,
        value_fn=attrgetter("watt_hours_last_7_days"),
        on_phase=None,
    ),
    EnvoyConsumptionSensorEntityDescription(
        key="lifetime_consumption",

@@ -315,6 +322,7 @@ CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
        suggested_display_precision=3,
        value_fn=attrgetter("watt_hours_lifetime"),
        on_phase=None,
    ),
)

@@ -346,6 +354,7 @@ NET_CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
        suggested_display_precision=3,
        value_fn=attrgetter("watts_now"),
        on_phase=None,
    ),
    EnvoyConsumptionSensorEntityDescription(
        key="lifetime_balanced_net_consumption",

@@ -357,6 +366,7 @@ NET_CONSUMPTION_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=3,
        value_fn=attrgetter("watt_hours_lifetime"),
        on_phase=None,
    ),
)

@@ -385,7 +395,7 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription):
        [EnvoyMeterData],
        int | float | str | CtType | CtMeterStatus | CtStatusFlags | CtState | None,
    ]
    on_phase: str | None = None
    on_phase: str | None
    cttype: str | None = None

@@ -401,6 +411,7 @@ CT_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
        suggested_display_precision=3,
        value_fn=attrgetter("energy_delivered"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key in (

@@ -419,6 +430,7 @@ CT_SENSORS = (
        suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
        suggested_display_precision=3,
        value_fn=attrgetter("energy_received"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key in (

@@ -437,6 +449,7 @@ CT_SENSORS = (
        suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
        suggested_display_precision=3,
        value_fn=attrgetter("active_power"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key in (

@@ -455,6 +468,7 @@ CT_SENSORS = (
        suggested_display_precision=1,
        entity_registry_enabled_default=False,
        value_fn=attrgetter("frequency"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key, translation_key in (

@@ -474,6 +488,7 @@ CT_SENSORS = (
        suggested_display_precision=1,
        entity_registry_enabled_default=False,
        value_fn=attrgetter("voltage"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key, translation_key in (

@@ -493,6 +508,7 @@ CT_SENSORS = (
        suggested_display_precision=3,
        entity_registry_enabled_default=False,
        value_fn=attrgetter("current"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key in (

@@ -510,6 +526,7 @@ CT_SENSORS = (
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
        value_fn=attrgetter("power_factor"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key in (

@@ -527,6 +544,7 @@ CT_SENSORS = (
        options=list(CtMeterStatus),
        entity_registry_enabled_default=False,
        value_fn=attrgetter("metering_status"),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key, translation_key in (

@@ -547,6 +565,7 @@ CT_SENSORS = (
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
        on_phase=None,
        cttype=cttype,
    )
    for cttype, key, translation_key in (
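The recurring `on_phase=None,` additions above go hand in hand with dropping the `= None` default on the description dataclasses: a keyword-only dataclass field without a default must be supplied at every call site. A small illustrative sketch; the `Description` class here is a simplified stand-in, not the real Envoy description:

```python
# Demonstrates why removing a field default forces explicit arguments.
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    on_phase: str | None  # no default: must be given explicitly


ok = Description(key="current_power_production", on_phase=None)

try:
    Description(key="daily_production")  # on_phase omitted
except TypeError as err:
    # Raises: missing required keyword-only argument 'on_phase'
    print(err)
```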
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
    "aioesphomeapi==43.10.1",
    "aioesphomeapi==43.9.1",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.4.0"
  ],
@@ -19,9 +19,6 @@ from .coordinator import FeedReaderCoordinator

LOGGER = logging.getLogger(__name__)

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

ATTR_CONTENT = "content"
ATTR_DESCRIPTION = "description"
ATTR_LINK = "link"
@@ -1,94 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: No custom actions are defined.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage:
    status: todo
    comment: missing test for uniqueness of feed URL.
  config-flow:
    status: todo
    comment: missing data descriptions
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: No authentication support.
  test-coverage:
    status: done
    comment: Can use freezer for skipping time instead

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: No discovery support.
  discovery:
    status: exempt
    comment: No discovery support.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: Each config entry, represents one service.
  entity-category: done
  entity-device-class:
    status: exempt
    comment: Matches no available event entity class.
  entity-disabled-by-default:
    status: exempt
    comment: Only one entity per config entry.
  entity-translations: todo
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow: done
  repair-issues:
    status: done
    comment: Only one repair-issue for yaml-import defined.
  stale-devices:
    status: exempt
    comment: Each config entry, represents one service.

  # Platinum
  async-dependency:
    status: todo
    comment: feedparser lib is not async.
  inject-websession:
    status: todo
    comment: feedparser lib doesn't take a session as argument.
  strict-typing:
    status: todo
    comment: feedparser lib is not fully typed.
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.10"]
  "requirements": ["pyfirefly==0.1.8"]
}
@@ -2,8 +2,6 @@

from typing import TYPE_CHECKING, Any

from fressnapftracker import FressnapfTrackerError

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ColorMode,

@@ -18,7 +16,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .const import DOMAIN
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception

PARALLEL_UPDATES = 1

@@ -64,18 +61,12 @@ class FressnapfTrackerLight(FressnapfTrackerEntity, LightEntity):
        self.raise_if_not_activatable()
        brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
        brightness = int((brightness / 255) * 100)
        try:
            await self.coordinator.client.set_led_brightness(brightness)
        except FressnapfTrackerError as e:
            handle_fressnapf_tracker_exception(e)
        await self.coordinator.client.set_led_brightness(brightness)
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off the device."""
        try:
            await self.coordinator.client.set_led_brightness(0)
        except FressnapfTrackerError as e:
            handle_fressnapf_tracker_exception(e)
        await self.coordinator.client.set_led_brightness(0)
        await self.coordinator.async_request_refresh()

    def raise_if_not_activatable(self) -> None:
@@ -26,7 +26,7 @@ rules:
  unique-config-entry: done

  # Silver
  action-exceptions: done
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
@@ -1,21 +0,0 @@
"""Services and service helpers for fressnapf_tracker."""

from fressnapftracker import FressnapfTrackerError, FressnapfTrackerInvalidTokenError

from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError

from .const import DOMAIN


def handle_fressnapf_tracker_exception(exception: FressnapfTrackerError):
    """Handle the different FressnapfTracker errors."""
    if isinstance(exception, FressnapfTrackerInvalidTokenError):
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
        ) from exception
    raise HomeAssistantError(
        translation_domain=DOMAIN,
        translation_key="api_error",
        translation_placeholders={"error_message": str(exception)},
    ) from exception
@@ -77,9 +77,6 @@
    }
  },
  "exceptions": {
    "api_error": {
      "message": "An error occurred while communicating with the Fressnapf Tracker API: {error_message}"
    },
    "charging": {
      "message": "The flashlight cannot be activated while charging."
    },
@@ -2,8 +2,6 @@

from typing import TYPE_CHECKING, Any

from fressnapftracker import FressnapfTrackerError

from homeassistant.components.switch import (
    SwitchDeviceClass,
    SwitchEntity,

@@ -15,7 +13,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import FressnapfTrackerConfigEntry
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception

PARALLEL_UPDATES = 1

@@ -46,18 +43,12 @@ class FressnapfTrackerSwitch(FressnapfTrackerEntity, SwitchEntity):

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on the device."""
        try:
            await self.coordinator.client.set_energy_saving(True)
        except FressnapfTrackerError as e:
            handle_fressnapf_tracker_exception(e)
        await self.coordinator.client.set_energy_saving(True)
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off the device."""
        try:
            await self.coordinator.client.set_energy_saving(False)
        except FressnapfTrackerError as e:
            handle_fressnapf_tracker_exception(e)
        await self.coordinator.client.set_energy_saving(False)
        await self.coordinator.async_request_refresh()

    @property
@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["google-genai==1.56.0"]
  "requirements": ["google-genai==1.38.0"]
}
@@ -23,6 +23,6 @@
  "iot_class": "cloud_push",
  "loggers": ["aiohomeconnect"],
  "quality_scale": "platinum",
  "requirements": ["aiohomeconnect==0.28.0"],
  "requirements": ["aiohomeconnect==0.26.0"],
  "zeroconf": ["_homeconnect._tcp.local."]
}
@@ -15,7 +15,7 @@ set_program_and_options:
          - active_program
          - selected_program
    program:
      example: dishcare_dishwasher_program_auto_2
      example: dishcare_dishwasher_program_auto2
      selector:
        select:
          mode: dropdown

@@ -121,7 +121,6 @@ set_program_and_options:
            - cooking_oven_program_heating_mode_pre_heating
            - cooking_oven_program_heating_mode_hot_air
            - cooking_oven_program_heating_mode_hot_air_eco
            - cooking_oven_program_heating_mode_hot_air_gentle
            - cooking_oven_program_heating_mode_hot_air_grilling
            - cooking_oven_program_heating_mode_top_bottom_heating
            - cooking_oven_program_heating_mode_top_bottom_heating_eco

@@ -148,7 +147,6 @@ set_program_and_options:
            - cooking_oven_program_microwave_900_watt
            - cooking_oven_program_microwave_1000_watt
            - cooking_oven_program_microwave_max
            - cooking_oven_program_steam_modes_steam
            - cooking_oven_program_heating_mode_warming_drawer
            - laundry_care_washer_program_auto_30
            - laundry_care_washer_program_auto_40

@@ -176,7 +174,7 @@ set_program_and_options:
            - laundry_care_washer_program_rinse_rinse_spin_drain
            - laundry_care_washer_program_sensitive
            - laundry_care_washer_program_shirts_blouses
            - laundry_care_washer_program_spin_spin_drain
            - laundry_care_washer_program_spin_drain
            - laundry_care_washer_program_sport_fitness
            - laundry_care_washer_program_super_153045_super_15
            - laundry_care_washer_program_super_153045_super_1530
@@ -240,7 +240,6 @@
        "cooking_oven_program_heating_mode_hot_air_60_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_60_steam%]",
        "cooking_oven_program_heating_mode_hot_air_80_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_80_steam%]",
        "cooking_oven_program_heating_mode_hot_air_eco": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_eco%]",
        "cooking_oven_program_heating_mode_hot_air_gentle": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_gentle%]",
        "cooking_oven_program_heating_mode_hot_air_grilling": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_grilling%]",
        "cooking_oven_program_heating_mode_intensive_heat": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_intensive_heat%]",
        "cooking_oven_program_heating_mode_keep_warm": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_keep_warm%]",

@@ -351,7 +350,7 @@
        "laundry_care_washer_program_rinse_rinse_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_rinse_rinse_spin_drain%]",
        "laundry_care_washer_program_sensitive": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_sensitive%]",
        "laundry_care_washer_program_shirts_blouses": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_shirts_blouses%]",
        "laundry_care_washer_program_spin_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_spin_spin_drain%]",
        "laundry_care_washer_program_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_spin_drain%]",
        "laundry_care_washer_program_sport_fitness": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_sport_fitness%]",
        "laundry_care_washer_program_super_153045_super_15": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_super_153045_super_15%]",
        "laundry_care_washer_program_super_153045_super_1530": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_super_153045_super_1530%]",

@@ -593,7 +592,6 @@
        "cooking_oven_program_heating_mode_hot_air_60_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_60_steam%]",
        "cooking_oven_program_heating_mode_hot_air_80_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_80_steam%]",
        "cooking_oven_program_heating_mode_hot_air_eco": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_eco%]",
        "cooking_oven_program_heating_mode_hot_air_gentle": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_gentle%]",
        "cooking_oven_program_heating_mode_hot_air_grilling": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_grilling%]",
        "cooking_oven_program_heating_mode_intensive_heat": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_intensive_heat%]",
        "cooking_oven_program_heating_mode_keep_warm": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_keep_warm%]",

@@ -614,7 +612,6 @@
        "cooking_oven_program_microwave_900_watt": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_microwave_900_watt%]",
        "cooking_oven_program_microwave_90_watt": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_microwave_90_watt%]",
        "cooking_oven_program_microwave_max": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_microwave_max%]",
        "cooking_oven_program_steam_modes_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_steam_modes_steam%]",
        "dishcare_dishwasher_program_auto_1": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_auto_1%]",
        "dishcare_dishwasher_program_auto_2": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_auto_2%]",
        "dishcare_dishwasher_program_auto_3": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_auto_3%]",

@@ -705,7 +702,7 @@
        "laundry_care_washer_program_rinse_rinse_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_rinse_rinse_spin_drain%]",
        "laundry_care_washer_program_sensitive": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_sensitive%]",
        "laundry_care_washer_program_shirts_blouses": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_shirts_blouses%]",
        "laundry_care_washer_program_spin_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_spin_spin_drain%]",
        "laundry_care_washer_program_spin_drain": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_spin_drain%]",
        "laundry_care_washer_program_sport_fitness": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_sport_fitness%]",
        "laundry_care_washer_program_super_153045_super_15": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_super_153045_super_15%]",
        "laundry_care_washer_program_super_153045_super_1530": "[%key:component::home_connect::selector::programs::options::laundry_care_washer_program_super_153045_super_1530%]",

@@ -1586,7 +1583,6 @@
        "cooking_oven_program_heating_mode_hot_air_60_steam": "Hot air + 60 RH",
        "cooking_oven_program_heating_mode_hot_air_80_steam": "Hot air + 80 RH",
        "cooking_oven_program_heating_mode_hot_air_eco": "Hot air eco",
        "cooking_oven_program_heating_mode_hot_air_gentle": "Hot air gentle",
        "cooking_oven_program_heating_mode_hot_air_grilling": "Hot air grilling",
        "cooking_oven_program_heating_mode_intensive_heat": "Intensive heat",
        "cooking_oven_program_heating_mode_keep_warm": "Keep warm",

@@ -1607,7 +1603,6 @@
        "cooking_oven_program_microwave_900_watt": "900 Watt",
        "cooking_oven_program_microwave_90_watt": "90 Watt",
        "cooking_oven_program_microwave_max": "Max",
        "cooking_oven_program_steam_modes_steam": "Steam mode",
        "dishcare_dishwasher_program_auto_1": "Auto 1",
        "dishcare_dishwasher_program_auto_2": "Auto 2",
        "dishcare_dishwasher_program_auto_3": "Auto 3",

@@ -1698,7 +1693,7 @@
        "laundry_care_washer_program_rinse_rinse_spin_drain": "Rinse spin drain",
        "laundry_care_washer_program_sensitive": "Sensitive",
        "laundry_care_washer_program_shirts_blouses": "Shirts/blouses",
        "laundry_care_washer_program_spin_spin_drain": "Spin/drain",
        "laundry_care_washer_program_spin_drain": "Spin/drain",
        "laundry_care_washer_program_sport_fitness": "Sport/fitness",
        "laundry_care_washer_program_super_153045_super_15": "Super 15 min",
        "laundry_care_washer_program_super_153045_super_1530": "Super 15/30 min",
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["jvcprojector"],
  "requirements": ["pyjvcprojector==1.1.3"]
  "requirements": ["pyjvcprojector==1.1.2"]
}
@@ -41,13 +41,6 @@ COMMANDS = {
    "mode_1": const.REMOTE_MODE_1,
    "mode_2": const.REMOTE_MODE_2,
    "mode_3": const.REMOTE_MODE_3,
    "mode_4": const.REMOTE_MODE_4,
    "mode_5": const.REMOTE_MODE_5,
    "mode_6": const.REMOTE_MODE_6,
    "mode_7": const.REMOTE_MODE_7,
    "mode_8": const.REMOTE_MODE_8,
    "mode_9": const.REMOTE_MODE_9,
    "mode_10": const.REMOTE_MODE_10,
    "lens_ap": const.REMOTE_LENS_AP,
    "gamma": const.REMOTE_GAMMA,
    "color_temp": const.REMOTE_COLOR_TEMP,
@@ -27,7 +27,7 @@ from .const import (
    SUPPORTED_PLATFORMS_UI,
    SUPPORTED_PLATFORMS_YAML,
)
from .expose import create_combined_knx_exposure
from .expose import create_knx_exposure
from .knx_module import KNXModule
from .project import STORAGE_KEY as PROJECT_STORAGE_KEY
from .schema import (

@@ -121,10 +121,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    hass.data[KNX_MODULE_KEY] = knx_module

    if CONF_KNX_EXPOSE in config:
        knx_module.yaml_exposures.extend(
            create_combined_knx_exposure(hass, knx_module.xknx, config[CONF_KNX_EXPOSE])
        )

        for expose_config in config[CONF_KNX_EXPOSE]:
            knx_module.exposures.append(
                create_knx_exposure(hass, knx_module.xknx, expose_config)
            )
    configured_platforms_yaml = {
        platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config
    }

@@ -149,9 +149,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        # if not loaded directly return
        return True

    for exposure in knx_module.yaml_exposures:
        exposure.async_remove()
    for exposure in knx_module.service_exposures.values():
    for exposure in knx_module.exposures:
        exposure.async_remove()

    configured_platforms_yaml = {
@@ -2,21 +2,14 @@

from __future__ import annotations

from collections.abc import Callable, Iterable
from dataclasses import dataclass
from collections.abc import Callable
import logging
from typing import Any

from xknx import XKNX
from xknx.devices import DateDevice, DateTimeDevice, ExposeSensor, TimeDevice
from xknx.dpt import DPTBase, DPTNumeric, DPTString
from xknx.dpt.dpt_1 import DPT1BitEnum, DPTSwitch
from xknx.dpt import DPTNumeric, DPTString
from xknx.exceptions import ConversionError
from xknx.telegram.address import (
    GroupAddress,
    InternalGroupAddress,
    parse_device_group_address,
)
from xknx.remote_value import RemoteValueSensor

from homeassistant.const import (
    CONF_ENTITY_ID,

@@ -48,159 +41,79 @@ _LOGGER = logging.getLogger(__name__)
@callback
def create_knx_exposure(
    hass: HomeAssistant, xknx: XKNX, config: ConfigType
) -> KnxExposeEntity | KnxExposeTime:
    """Create single exposure."""
) -> KNXExposeSensor | KNXExposeTime:
    """Create exposures from config."""

    expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
    exposure: KnxExposeEntity | KnxExposeTime

    exposure: KNXExposeSensor | KNXExposeTime
    if (
        isinstance(expose_type, str)
        and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
    ):
        exposure = KnxExposeTime(
        exposure = KNXExposeTime(
            xknx=xknx,
            config=config,
        )
    else:
        exposure = KnxExposeEntity(
            hass=hass,
        exposure = KNXExposeSensor(
            hass,
            xknx=xknx,
            entity_id=config[CONF_ENTITY_ID],
            options=(_yaml_config_to_expose_options(config),),
            config=config,
        )
    exposure.async_register()
    return exposure


@callback
def create_combined_knx_exposure(
    hass: HomeAssistant, xknx: XKNX, configs: list[ConfigType]
) -> list[KnxExposeEntity | KnxExposeTime]:
    """Create exposures from YAML config combined by entity_id."""
    exposures: list[KnxExposeEntity | KnxExposeTime] = []
    entity_exposure_map: dict[str, list[KnxExposeOptions]] = {}

    for config in configs:
        value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
        if value_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES:
            time_exposure = KnxExposeTime(
                xknx=xknx,
                config=config,
            )
            time_exposure.async_register()
            exposures.append(time_exposure)
            continue

        entity_id = config[CONF_ENTITY_ID]
        option = _yaml_config_to_expose_options(config)
        entity_exposure_map.setdefault(entity_id, []).append(option)

    for entity_id, options in entity_exposure_map.items():
        entity_exposure = KnxExposeEntity(
            hass=hass,
            xknx=xknx,
            entity_id=entity_id,
            options=options,
        )
        entity_exposure.async_register()
        exposures.append(entity_exposure)
    return exposures


@dataclass(slots=True)
class KnxExposeOptions:
    """Options for KNX Expose."""

    attribute: str | None
    group_address: GroupAddress | InternalGroupAddress
    dpt: type[DPTBase]
    respond_to_read: bool
    cooldown: float
    default: Any | None
    value_template: Template | None


def _yaml_config_to_expose_options(config: ConfigType) -> KnxExposeOptions:
    """Convert single yaml expose config to KnxExposeOptions."""
    value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
    dpt: type[DPTBase]
    if value_type == "binary":
        # HA yaml expose flag for DPT-1 (no explicit DPT 1 definitions in xknx back then)
        dpt = DPTSwitch
    else:
        dpt = DPTBase.parse_transcoder(config[ExposeSchema.CONF_KNX_EXPOSE_TYPE])  # type: ignore[assignment] # checked by schema validation
    ga = parse_device_group_address(config[KNX_ADDRESS])
    return KnxExposeOptions(
        attribute=config.get(ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE),
|
||||
group_address=ga,
|
||||
dpt=dpt,
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
|
||||
default=config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT),
|
||||
value_template=config.get(CONF_VALUE_TEMPLATE),
|
||||
)
|
||||
|
||||
|
||||
class KnxExposeEntity:
|
||||
"""Expose Home Assistant entity values to KNX bus."""
|
||||
class KNXExposeSensor:
|
||||
"""Object to Expose Home Assistant entity to KNX bus."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
xknx: XKNX,
|
||||
entity_id: str,
|
||||
options: Iterable[KnxExposeOptions],
|
||||
config: ConfigType,
|
||||
) -> None:
|
||||
"""Initialize KnxExposeEntity class."""
|
||||
"""Initialize of Expose class."""
|
||||
self.hass = hass
|
||||
self.xknx = xknx
|
||||
self.entity_id = entity_id
|
||||
|
||||
self.entity_id: str = config[CONF_ENTITY_ID]
|
||||
self.expose_attribute: str | None = config.get(
|
||||
ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE
|
||||
)
|
||||
self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT)
|
||||
self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
|
||||
self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE)
|
||||
|
||||
self._remove_listener: Callable[[], None] | None = None
|
||||
self._exposures = tuple(
|
||||
(
|
||||
option,
|
||||
ExposeSensor(
|
||||
xknx=self.xknx,
|
||||
name=f"{self.entity_id} {option.attribute or 'state'}",
|
||||
group_address=option.group_address,
|
||||
respond_to_read=option.respond_to_read,
|
||||
value_type=option.dpt,
|
||||
cooldown=option.cooldown,
|
||||
),
|
||||
)
|
||||
for option in options
|
||||
self.device: ExposeSensor = ExposeSensor(
|
||||
xknx=self.xknx,
|
||||
name=f"{self.entity_id}__{self.expose_attribute or 'state'}",
|
||||
group_address=config[KNX_ADDRESS],
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
value_type=self.expose_type,
|
||||
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return name of the expose entity."""
|
||||
expose_names = [opt.attribute or "state" for opt, _ in self._exposures]
|
||||
return f"{self.entity_id}__{'__'.join(expose_names)}"
|
||||
|
||||
@callback
|
||||
def async_register(self) -> None:
|
||||
"""Register listener and XKNX devices."""
|
||||
"""Register listener."""
|
||||
self._remove_listener = async_track_state_change_event(
|
||||
self.hass, [self.entity_id], self._async_entity_changed
|
||||
)
|
||||
for _option, xknx_expose in self._exposures:
|
||||
self.xknx.devices.async_add(xknx_expose)
|
||||
self.xknx.devices.async_add(self.device)
|
||||
self._init_expose_state()
|
||||
|
||||
@callback
|
||||
def _init_expose_state(self) -> None:
|
||||
"""Initialize state of all exposures."""
|
||||
"""Initialize state of the exposure."""
|
||||
init_state = self.hass.states.get(self.entity_id)
|
||||
for option, xknx_expose in self._exposures:
|
||||
state_value = self._get_expose_value(init_state, option)
|
||||
try:
|
||||
xknx_expose.sensor_value.value = state_value
|
||||
except ConversionError:
|
||||
_LOGGER.exception(
|
||||
"Error setting value %s for expose sensor %s",
|
||||
state_value,
|
||||
xknx_expose.name,
|
||||
)
|
||||
state_value = self._get_expose_value(init_state)
|
||||
try:
|
||||
self.device.sensor_value.value = state_value
|
||||
except ConversionError:
|
||||
_LOGGER.exception("Error during sending of expose sensor value")
|
||||
|
||||
@callback
|
||||
def async_remove(self) -> None:
|
||||
@@ -208,57 +121,53 @@ class KnxExposeEntity:
|
||||
if self._remove_listener is not None:
|
||||
self._remove_listener()
|
||||
self._remove_listener = None
|
||||
for _option, xknx_expose in self._exposures:
|
||||
self.xknx.devices.async_remove(xknx_expose)
|
||||
self.xknx.devices.async_remove(self.device)
|
||||
|
||||
def _get_expose_value(
|
||||
self, state: State | None, option: KnxExposeOptions
|
||||
) -> bool | int | float | str | None:
|
||||
"""Extract value from state for a specific option."""
|
||||
def _get_expose_value(self, state: State | None) -> bool | int | float | str | None:
|
||||
"""Extract value from state."""
|
||||
if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
|
||||
if option.default is None:
|
||||
if self.expose_default is None:
|
||||
return None
|
||||
value = option.default
|
||||
elif option.attribute is not None:
|
||||
_attr = state.attributes.get(option.attribute)
|
||||
value = _attr if _attr is not None else option.default
|
||||
value = self.expose_default
|
||||
elif self.expose_attribute is not None:
|
||||
_attr = state.attributes.get(self.expose_attribute)
|
||||
value = _attr if _attr is not None else self.expose_default
|
||||
else:
|
||||
value = state.state
|
||||
|
||||
if option.value_template is not None:
|
||||
if self.value_template is not None:
|
||||
try:
|
||||
value = option.value_template.async_render_with_possible_json_value(
|
||||
value = self.value_template.async_render_with_possible_json_value(
|
||||
value, error_value=None
|
||||
)
|
||||
except (TemplateError, TypeError, ValueError) as err:
|
||||
_LOGGER.warning(
|
||||
"Error rendering value template for KNX expose %s %s %s: %s",
|
||||
self.entity_id,
|
||||
option.attribute or "state",
|
||||
option.value_template.template,
|
||||
"Error rendering value template for KNX expose %s %s: %s",
|
||||
self.device.name,
|
||||
self.value_template.template,
|
||||
err,
|
||||
)
|
||||
return None
|
||||
|
||||
if issubclass(option.dpt, DPT1BitEnum):
|
||||
if self.expose_type == "binary":
|
||||
if value in (1, STATE_ON, "True"):
|
||||
return True
|
||||
if value in (0, STATE_OFF, "False"):
|
||||
return False
|
||||
|
||||
# Handle numeric and string DPT conversions
|
||||
if value is not None:
|
||||
if value is not None and (
|
||||
isinstance(self.device.sensor_value, RemoteValueSensor)
|
||||
):
|
||||
try:
|
||||
if issubclass(option.dpt, DPTNumeric):
|
||||
if issubclass(self.device.sensor_value.dpt_class, DPTNumeric):
|
||||
return float(value)
|
||||
if issubclass(option.dpt, DPTString):
|
||||
if issubclass(self.device.sensor_value.dpt_class, DPTString):
|
||||
# DPT 16.000 only allows up to 14 Bytes
|
||||
return str(value)[:14]
|
||||
except (ValueError, TypeError) as err:
|
||||
_LOGGER.warning(
|
||||
'Could not expose %s %s value "%s" to KNX: Conversion failed: %s',
|
||||
self.entity_id,
|
||||
option.attribute or "state",
|
||||
self.expose_attribute or "state",
|
||||
value,
|
||||
err,
|
||||
)
|
||||
@@ -266,40 +175,32 @@ class KnxExposeEntity:
|
||||
return value # type: ignore[no-any-return]
|
||||
|
||||
async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None:
|
||||
"""Handle entity change for all options."""
|
||||
"""Handle entity change."""
|
||||
new_state = event.data["new_state"]
|
||||
if (new_value := self._get_expose_value(new_state)) is None:
|
||||
return
|
||||
old_state = event.data["old_state"]
|
||||
# don't use default value for comparison on first state change (old_state is None)
|
||||
old_value = self._get_expose_value(old_state) if old_state is not None else None
|
||||
# don't send same value sequentially
|
||||
if new_value != old_value:
|
||||
await self._async_set_knx_value(new_value)
|
||||
|
||||
for option, xknx_expose in self._exposures:
|
||||
new_value = self._get_expose_value(new_state, option)
|
||||
if new_value is None:
|
||||
continue
|
||||
# Don't use default value for comparison on first state change
|
||||
old_value = (
|
||||
self._get_expose_value(old_state, option)
|
||||
if old_state is not None
|
||||
else None
|
||||
)
|
||||
# Don't send same value sequentially
|
||||
if new_value != old_value:
|
||||
await self._async_set_knx_value(xknx_expose, new_value)
|
||||
|
||||
async def _async_set_knx_value(
|
||||
self, xknx_expose: ExposeSensor, value: StateType
|
||||
) -> None:
|
||||
async def _async_set_knx_value(self, value: StateType) -> None:
|
||||
"""Set new value on xknx ExposeSensor."""
|
||||
try:
|
||||
await xknx_expose.set(value)
|
||||
await self.device.set(value)
|
||||
except ConversionError as err:
|
||||
_LOGGER.warning(
|
||||
'Could not expose %s value "%s" to KNX: %s',
|
||||
xknx_expose.name,
|
||||
'Could not expose %s %s value "%s" to KNX: %s',
|
||||
self.entity_id,
|
||||
self.expose_attribute or "state",
|
||||
value,
|
||||
err,
|
||||
)
|
||||
|
||||
|
||||
class KnxExposeTime:
|
||||
class KNXExposeTime:
|
||||
"""Object to Expose Time/Date object to KNX bus."""
|
||||
|
||||
def __init__(self, xknx: XKNX, config: ConfigType) -> None:
|
||||
@@ -321,11 +222,6 @@ class KnxExposeTime:
|
||||
group_address=config[KNX_ADDRESS],
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return name of the time expose object."""
|
||||
return f"expose_{self.device.name}"
|
||||
|
||||
@callback
|
||||
def async_register(self) -> None:
|
||||
"""Register listener."""
|
||||
|
||||
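The expose.py hunks above revert from KnxExposeEntity (one listener, several DPT-typed targets per entity) to the older KNXExposeSensor (one object per config). The conversion rules visible in _get_expose_value are essentially the same in both shapes; a rough standalone illustration of them, using simplified type labels rather than xknx's DPT classes:

def convert_for_knx(value, dpt: str):
    """Rough illustration of the conversion rules in _get_expose_value (not the integration code)."""
    if value is None:
        return None
    if dpt == "binary":
        # Accept the usual truthy/falsy spellings seen in HA states.
        if value in (1, "on", "True", True):
            return True
        if value in (0, "off", "False", False):
            return False
        return None
    if dpt == "string":
        # DPT 16.000 carries at most 14 bytes, so longer states are truncated.
        return str(value)[:14]
    # Numeric DPTs are sent as floats; invalid values are dropped with a warning upstream.
    try:
        return float(value)
    except (TypeError, ValueError):
        return None


if __name__ == "__main__":
    print(convert_for_knx("on", "binary"))                  # True
    print(convert_for_knx("21.5", "percent"))                # 21.5
    print(convert_for_knx("a very long state value", "string"))  # truncated to 14 characters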
@@ -54,7 +54,7 @@ from .const import (
|
||||
TELEGRAM_LOG_DEFAULT,
|
||||
)
|
||||
from .device import KNXInterfaceDevice
|
||||
from .expose import KnxExposeEntity, KnxExposeTime
|
||||
from .expose import KNXExposeSensor, KNXExposeTime
|
||||
from .project import KNXProject
|
||||
from .repairs import data_secure_group_key_issue_dispatcher
|
||||
from .storage.config_store import KNXConfigStore
|
||||
@@ -73,8 +73,8 @@ class KNXModule:
|
||||
self.hass = hass
|
||||
self.config_yaml = config
|
||||
self.connected = False
|
||||
self.yaml_exposures: list[KnxExposeEntity | KnxExposeTime] = []
|
||||
self.service_exposures: dict[str, KnxExposeEntity | KnxExposeTime] = {}
|
||||
self.exposures: list[KNXExposeSensor | KNXExposeTime] = []
|
||||
self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {}
|
||||
self.entry = entry
|
||||
|
||||
self.project = KNXProject(hass=hass, entry=entry)
|
||||
|
||||
@@ -193,7 +193,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
|
||||
" for '%s' - %s"
|
||||
),
|
||||
group_address,
|
||||
replaced_exposure.name,
|
||||
replaced_exposure.device.name,
|
||||
)
|
||||
replaced_exposure.async_remove()
|
||||
exposure = create_knx_exposure(knx_module.hass, knx_module.xknx, call.data)
|
||||
@@ -201,7 +201,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
|
||||
_LOGGER.debug(
|
||||
"Service exposure_register registered exposure for '%s' - %s",
|
||||
group_address,
|
||||
exposure.name,
|
||||
exposure.device.name,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.8.4"]
"requirements": ["librehardwaremonitor-api==1.6.0"]
}
|
||||
@@ -19,12 +19,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .api import AsyncConfigEntryAuth
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
MieleAuxDataUpdateCoordinator,
|
||||
MieleConfigEntry,
|
||||
MieleDataUpdateCoordinator,
|
||||
MieleRuntimeData,
|
||||
)
|
||||
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
@@ -80,23 +75,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: MieleConfigEntry) -> boo
|
||||
) from err
|
||||
|
||||
# Setup MieleAPI and coordinator for data fetch
|
||||
_api = MieleAPI(auth)
|
||||
_coordinator = MieleDataUpdateCoordinator(hass, entry, _api)
|
||||
await _coordinator.async_config_entry_first_refresh()
|
||||
_aux_coordinator = MieleAuxDataUpdateCoordinator(hass, entry, _api)
|
||||
await _aux_coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = MieleRuntimeData(_api, _coordinator, _aux_coordinator)
|
||||
api = MieleAPI(auth)
|
||||
coordinator = MieleDataUpdateCoordinator(hass, entry, api)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
entry.async_create_background_task(
|
||||
hass,
|
||||
entry.runtime_data.api.listen_events(
|
||||
data_callback=_coordinator.callback_update_data,
|
||||
actions_callback=_coordinator.callback_update_actions,
|
||||
coordinator.api.listen_events(
|
||||
data_callback=coordinator.callback_update_data,
|
||||
actions_callback=coordinator.callback_update_actions,
|
||||
),
|
||||
"pymiele event listener",
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
@@ -116,5 +107,5 @@ async def async_remove_config_entry_device(
|
||||
identifier
|
||||
for identifier in device_entry.identifiers
|
||||
if identifier[0] == DOMAIN
|
||||
and identifier[1] in config_entry.runtime_data.coordinator.data.devices
|
||||
and identifier[1] in config_entry.runtime_data.data.devices
|
||||
)
|
||||
|
||||
@@ -264,7 +264,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the binary sensor platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -112,7 +112,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the button platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -138,7 +138,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the climate platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -9,13 +9,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from pymiele import (
|
||||
MieleAction,
|
||||
MieleAPI,
|
||||
MieleDevice,
|
||||
MieleFillingLevel,
|
||||
MieleFillingLevels,
|
||||
)
|
||||
from pymiele import MieleAction, MieleAPI, MieleDevice
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -26,16 +20,7 @@ from .const import DOMAIN
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class MieleRuntimeData:
|
||||
"""Runtime data for the Miele integration."""
|
||||
|
||||
api: MieleAPI
|
||||
coordinator: MieleDataUpdateCoordinator
|
||||
aux_coordinator: MieleAuxDataUpdateCoordinator
|
||||
|
||||
|
||||
type MieleConfigEntry = ConfigEntry[MieleRuntimeData]
|
||||
type MieleConfigEntry = ConfigEntry[MieleDataUpdateCoordinator]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -46,15 +31,8 @@ class MieleCoordinatorData:
|
||||
actions: dict[str, MieleAction]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MieleAuxCoordinatorData:
|
||||
"""Data class for storing auxiliary coordinator data."""
|
||||
|
||||
filling_levels: dict[str, MieleFillingLevel]
|
||||
|
||||
|
||||
class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
|
||||
"""Main coordinator for Miele data."""
|
||||
"""Coordinator for Miele data."""
|
||||
|
||||
config_entry: MieleConfigEntry
|
||||
new_device_callbacks: list[Callable[[dict[str, MieleDevice]], None]] = []
|
||||
@@ -88,7 +66,6 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
|
||||
}
|
||||
self.devices = devices
|
||||
actions = {}
|
||||
|
||||
for device_id in devices:
|
||||
try:
|
||||
actions_json = await self.api.get_actions(device_id)
|
||||
@@ -122,7 +99,10 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
|
||||
device_id: MieleDevice(device) for device_id, device in devices_json.items()
|
||||
}
|
||||
self.async_set_updated_data(
|
||||
MieleCoordinatorData(devices=devices, actions=self.data.actions)
|
||||
MieleCoordinatorData(
|
||||
devices=devices,
|
||||
actions=self.data.actions,
|
||||
)
|
||||
)
|
||||
|
||||
async def callback_update_actions(self, actions_json: dict[str, dict]) -> None:
|
||||
@@ -131,34 +111,8 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
|
||||
device_id: MieleAction(action) for device_id, action in actions_json.items()
|
||||
}
|
||||
self.async_set_updated_data(
|
||||
MieleCoordinatorData(devices=self.data.devices, actions=actions)
|
||||
)
|
||||
|
||||
|
||||
class MieleAuxDataUpdateCoordinator(DataUpdateCoordinator[MieleAuxCoordinatorData]):
|
||||
"""Coordinator for Miele data for slowly polled endpoints."""
|
||||
|
||||
config_entry: MieleConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: MieleConfigEntry,
|
||||
api: MieleAPI,
|
||||
) -> None:
|
||||
"""Initialize the Miele data coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(seconds=60),
|
||||
)
|
||||
self.api = api
|
||||
|
||||
async def _async_update_data(self) -> MieleAuxCoordinatorData:
|
||||
"""Fetch data from the Miele API."""
|
||||
filling_levels_json = await self.api.get_filling_levels()
|
||||
return MieleAuxCoordinatorData(
|
||||
filling_levels=MieleFillingLevels(filling_levels_json).filling_levels
|
||||
MieleCoordinatorData(
|
||||
devices=self.data.devices,
|
||||
actions=actions,
|
||||
)
|
||||
)
|
||||
|
||||
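The coordinator.py hunk above removes the branch-side MieleRuntimeData / MieleAuxDataUpdateCoordinator pair and stores a single coordinator on the config entry again. A small sketch of the removed layout, using stand-in classes instead of Home Assistant's DataUpdateCoordinator and pymiele's API client:

from dataclasses import dataclass
from datetime import timedelta


@dataclass
class ApiClient:
    """Stand-in for pymiele's MieleAPI; only used to show the wiring."""

    host: str


@dataclass
class Coordinator:
    """Stand-in for a DataUpdateCoordinator: one API client, one refresh cadence."""

    api: ApiClient
    name: str
    update_interval: timedelta


@dataclass
class RuntimeData:
    """Bundle stored on the config entry so each platform can pick the coordinator it needs."""

    api: ApiClient
    coordinator: Coordinator       # event-driven device/action data
    aux_coordinator: Coordinator   # slowly polled data, e.g. filling levels


def build_runtime_data(host: str) -> RuntimeData:
    api = ApiClient(host)
    return RuntimeData(
        api=api,
        # Interval for the main coordinator is illustrative; the real one is event driven.
        coordinator=Coordinator(api, "devices", timedelta(minutes=2)),
        # The removed aux coordinator polled every 60 seconds.
        aux_coordinator=Coordinator(api, "filling_levels", timedelta(seconds=60)),
    )


if __name__ == "__main__":
    runtime = build_runtime_data("example.invalid")
    print(runtime.aux_coordinator.update_interval)  # 0:01:00

Keeping the slowly polled filling-level data on its own coordinator lets the event-driven device data refresh independently of the 60-second poll, which is what the reverted design traded away for simplicity.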
@@ -38,19 +38,13 @@ async def async_get_config_entry_diagnostics(
|
||||
"devices": redact_identifiers(
|
||||
{
|
||||
device_id: device_data.raw
|
||||
for device_id, device_data in config_entry.runtime_data.coordinator.data.devices.items()
|
||||
}
|
||||
),
|
||||
"filling_levels": redact_identifiers(
|
||||
{
|
||||
device_id: filling_level_data.raw
|
||||
for device_id, filling_level_data in config_entry.runtime_data.aux_coordinator.data.filling_levels.items()
|
||||
for device_id, device_data in config_entry.runtime_data.data.devices.items()
|
||||
}
|
||||
),
|
||||
"actions": redact_identifiers(
|
||||
{
|
||||
device_id: action_data.raw
|
||||
for device_id, action_data in config_entry.runtime_data.coordinator.data.actions.items()
|
||||
for device_id, action_data in config_entry.runtime_data.data.actions.items()
|
||||
}
|
||||
),
|
||||
}
|
||||
@@ -74,19 +68,13 @@ async def async_get_device_diagnostics(
|
||||
"model_id": device.model_id,
|
||||
}
|
||||
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
aux_coordinator = config_entry.runtime_data.aux_coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
device_id = cast(str, device.serial_number)
|
||||
miele_data: dict[str, Any] = {
|
||||
"devices": {
|
||||
hash_identifier(device_id): coordinator.data.devices[device_id].raw
|
||||
},
|
||||
"filling_levels": {
|
||||
hash_identifier(device_id): aux_coordinator.data.filling_levels[
|
||||
device_id
|
||||
].raw
|
||||
},
|
||||
"actions": {
|
||||
hash_identifier(device_id): coordinator.data.actions[device_id].raw
|
||||
},
|
||||
|
||||
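Both diagnostics handlers above key their output through hash_identifier / redact_identifiers so raw serial numbers never appear in a diagnostics dump. Those helpers are not shown in this hunk; the sketch below is a guess at their general shape, not the integration's code:

import hashlib


def hash_identifier(device_id: str) -> str:
    """Stable, non-reversible stand-in for a serial number in diagnostics output."""
    return "**REDACTED_" + hashlib.sha256(device_id.encode()).hexdigest()[:8]


def redact_identifiers(data: dict[str, dict]) -> dict[str, dict]:
    """Re-key a per-device mapping so raw serial numbers are replaced by hashes."""
    return {hash_identifier(device_id): payload for device_id, payload in data.items()}


if __name__ == "__main__":
    print(redact_identifiers({"000123456789": {"state": "running"}}))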
@@ -1,18 +1,16 @@
|
||||
"""Entity base class for the Miele integration."""
|
||||
|
||||
from pymiele import MieleAction, MieleAPI, MieleDevice, MieleFillingLevel
|
||||
from pymiele import MieleAction, MieleAPI, MieleDevice
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEVICE_TYPE_TAGS, DOMAIN, MANUFACTURER, MieleAppliance, StateStatus
|
||||
from .coordinator import MieleAuxDataUpdateCoordinator, MieleDataUpdateCoordinator
|
||||
from .coordinator import MieleDataUpdateCoordinator
|
||||
|
||||
|
||||
class MieleBaseEntity[
|
||||
_MieleCoordinatorT: MieleDataUpdateCoordinator | MieleAuxDataUpdateCoordinator
|
||||
](CoordinatorEntity[_MieleCoordinatorT]):
|
||||
class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
|
||||
"""Base class for Miele entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
@@ -24,7 +22,7 @@ class MieleBaseEntity[
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: _MieleCoordinatorT,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
@@ -32,26 +30,7 @@ class MieleBaseEntity[
|
||||
super().__init__(coordinator)
|
||||
self._device_id = device_id
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = MieleBaseEntity.get_unique_id(device_id, description)
|
||||
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
|
||||
|
||||
@property
|
||||
def api(self) -> MieleAPI:
|
||||
"""Return the api object."""
|
||||
return self.coordinator.api
|
||||
|
||||
|
||||
class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
|
||||
"""Base class for Miele entities that use the main data coordinator."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator, device_id, description)
|
||||
self._attr_unique_id = MieleEntity.get_unique_id(device_id, description)
|
||||
|
||||
device = self.device
|
||||
appliance_type = DEVICE_TYPE_TAGS.get(MieleAppliance(device.device_type))
|
||||
@@ -82,6 +61,11 @@ class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
|
||||
"""Return the actions object."""
|
||||
return self.coordinator.data.actions[self._device_id]
|
||||
|
||||
@property
|
||||
def api(self) -> MieleAPI:
|
||||
"""Return the api object."""
|
||||
return self.coordinator.api
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return the availability of the entity."""
|
||||
@@ -91,12 +75,3 @@ class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
|
||||
and self._device_id in self.coordinator.data.devices
|
||||
and (self.device.state_status is not StateStatus.not_connected)
|
||||
)
|
||||
|
||||
|
||||
class MieleAuxEntity(MieleBaseEntity[MieleAuxDataUpdateCoordinator]):
|
||||
"""Base class for Miele entities that use the auxiliary data coordinator."""
|
||||
|
||||
@property
|
||||
def levels(self) -> MieleFillingLevel:
|
||||
"""Return the filling levels object."""
|
||||
return self.coordinator.data.filling_levels[self._device_id]
|
||||
|
||||
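entity.py above drops the generic MieleBaseEntity[_MieleCoordinatorT] base in favour of a single MieleEntity bound to the main coordinator. A stripped-down sketch of the generic pattern with stand-in coordinators (PEP 695 syntax, Python 3.12+), not the integration's actual classes:

from dataclasses import dataclass


@dataclass
class MainCoordinator:
    """Stand-in for the event-driven device coordinator."""

    devices: dict[str, str]


@dataclass
class AuxCoordinator:
    """Stand-in for the slowly polled filling-level coordinator."""

    filling_levels: dict[str, int]


class BaseEntity[CoordinatorT]:
    """Generic base so entities can bind to either coordinator type."""

    def __init__(self, coordinator: CoordinatorT, device_id: str) -> None:
        self.coordinator = coordinator
        self.device_id = device_id


class DeviceEntity(BaseEntity[MainCoordinator]):
    @property
    def device(self) -> str:
        return self.coordinator.devices[self.device_id]


class FillingLevelEntity(BaseEntity[AuxCoordinator]):
    @property
    def level(self) -> int:
        return self.coordinator.filling_levels[self.device_id]


if __name__ == "__main__":
    main = MainCoordinator({"abc": "WASHING_MACHINE"})
    aux = AuxCoordinator({"abc": 42})
    print(DeviceEntity(main, "abc").device)      # WASHING_MACHINE
    print(FillingLevelEntity(aux, "abc").level)  # 42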
@@ -66,7 +66,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the fan platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -71,9 +71,6 @@
|
||||
"plate_step_warming": "mdi:alpha-w-circle-outline"
|
||||
}
|
||||
},
|
||||
"power_disk_level": {
|
||||
"default": "mdi:car-coolant-level"
|
||||
},
|
||||
"program_id": {
|
||||
"default": "mdi:selection-ellipse-arrow-inside"
|
||||
},
|
||||
@@ -86,12 +83,6 @@
|
||||
"remaining_time": {
|
||||
"default": "mdi:clock-end"
|
||||
},
|
||||
"rinse_aid_level": {
|
||||
"default": "mdi:water-opacity"
|
||||
},
|
||||
"salt_level": {
|
||||
"default": "mdi:shaker-outline"
|
||||
},
|
||||
"spin_speed": {
|
||||
"default": "mdi:sync"
|
||||
},
|
||||
@@ -104,12 +95,6 @@
|
||||
"target_temperature": {
|
||||
"default": "mdi:thermometer-check"
|
||||
},
|
||||
"twin_dos_1_level": {
|
||||
"default": "mdi:car-coolant-level"
|
||||
},
|
||||
"twin_dos_2_level": {
|
||||
"default": "mdi:car-coolant-level"
|
||||
},
|
||||
"water_forecast": {
|
||||
"default": "mdi:water-outline"
|
||||
}
|
||||
|
||||
@@ -86,7 +86,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the light platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -71,7 +71,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the select platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
from pymiele import MieleDevice, MieleFillingLevel, MieleTemperature
|
||||
from pymiele import MieleDevice, MieleTemperature
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
RestoreSensor,
|
||||
@@ -44,12 +44,8 @@ from .const import (
|
||||
StateProgramType,
|
||||
StateStatus,
|
||||
)
|
||||
from .coordinator import (
|
||||
MieleAuxDataUpdateCoordinator,
|
||||
MieleConfigEntry,
|
||||
MieleDataUpdateCoordinator,
|
||||
)
|
||||
from .entity import MieleAuxEntity, MieleEntity
|
||||
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
|
||||
from .entity import MieleEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -143,13 +139,10 @@ def _convert_finish_timestamp(
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class MieleSensorDescription[T: (MieleDevice, MieleFillingLevel)](
|
||||
SensorEntityDescription
|
||||
):
|
||||
class MieleSensorDescription(SensorEntityDescription):
|
||||
"""Class describing Miele sensor entities."""
|
||||
|
||||
value_fn: Callable[[T], StateType | datetime]
|
||||
|
||||
value_fn: Callable[[MieleDevice], StateType | datetime]
|
||||
end_value_fn: Callable[[StateType | datetime], StateType | datetime] | None = None
|
||||
extra_attributes: dict[str, Callable[[MieleDevice], StateType]] | None = None
|
||||
zone: int | None = None
|
||||
@@ -157,14 +150,14 @@ class MieleSensorDescription[T: (MieleDevice, MieleFillingLevel)](
|
||||
|
||||
|
||||
@dataclass
|
||||
class MieleSensorDefinition[T: (MieleDevice, MieleFillingLevel)]:
|
||||
class MieleSensorDefinition:
|
||||
"""Class for defining sensor entities."""
|
||||
|
||||
types: tuple[MieleAppliance, ...]
|
||||
description: MieleSensorDescription[T]
|
||||
description: MieleSensorDescription
|
||||
|
||||
|
||||
SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
|
||||
SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
@@ -696,59 +689,6 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
|
||||
),
|
||||
)
|
||||
|
||||
POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]] = (
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.WASHING_MACHINE,),
|
||||
description=MieleSensorDescription[MieleFillingLevel](
|
||||
key="twin_dos_1_level",
|
||||
translation_key="twin_dos_1_level",
|
||||
value_fn=lambda value: value.twin_dos_container_1_filling_level,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.WASHING_MACHINE,),
|
||||
description=MieleSensorDescription[MieleFillingLevel](
|
||||
key="twin_dos_2_level",
|
||||
translation_key="twin_dos_2_level",
|
||||
value_fn=lambda value: value.twin_dos_container_2_filling_level,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.DISHWASHER,),
|
||||
description=MieleSensorDescription[MieleFillingLevel](
|
||||
key="power_disk_level",
|
||||
translation_key="power_disk_level",
|
||||
value_fn=lambda value: None,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.DISHWASHER,),
|
||||
description=MieleSensorDescription[MieleFillingLevel](
|
||||
key="salt_level",
|
||||
translation_key="salt_level",
|
||||
value_fn=lambda value: value.salt_filling_level,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.DISHWASHER,),
|
||||
description=MieleSensorDescription[MieleFillingLevel](
|
||||
key="rinse_aid_level",
|
||||
translation_key="rinse_aid_level",
|
||||
value_fn=lambda value: value.rinse_aid_filling_level,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -756,14 +696,11 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the sensor platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
aux_coordinator = config_entry.runtime_data.aux_coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set() # device_id
|
||||
added_entities: set[str] = set() # unique_id
|
||||
|
||||
def _get_entity_class(
|
||||
definition: MieleSensorDefinition[MieleDevice],
|
||||
) -> type[MieleSensor]:
|
||||
def _get_entity_class(definition: MieleSensorDefinition) -> type[MieleSensor]:
|
||||
"""Get the entity class for the sensor."""
|
||||
return {
|
||||
"state_status": MieleStatusSensor,
|
||||
@@ -788,7 +725,7 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
def _is_sensor_enabled(
|
||||
definition: MieleSensorDefinition[MieleDevice],
|
||||
definition: MieleSensorDefinition,
|
||||
device: MieleDevice,
|
||||
unique_id: str,
|
||||
) -> bool:
|
||||
@@ -811,15 +748,6 @@ async def async_setup_entry(
|
||||
return False
|
||||
return True
|
||||
|
||||
def _enabled_aux_sensor(
|
||||
definition: MieleSensorDefinition[MieleFillingLevel], level: MieleFillingLevel
|
||||
) -> bool:
|
||||
"""Check if aux sensors are enabled."""
|
||||
return not (
|
||||
definition.description.value_fn is not None
|
||||
and definition.description.value_fn(level) is None
|
||||
)
|
||||
|
||||
def _async_add_devices() -> None:
|
||||
nonlocal added_devices, added_entities
|
||||
entities: list = []
|
||||
@@ -847,11 +775,7 @@ async def async_setup_entry(
|
||||
continue
|
||||
|
||||
# sensor is not enabled, skip
|
||||
if not _is_sensor_enabled(
|
||||
definition,
|
||||
device,
|
||||
unique_id,
|
||||
):
|
||||
if not _is_sensor_enabled(definition, device, unique_id):
|
||||
continue
|
||||
|
||||
added_entities.add(unique_id)
|
||||
@@ -863,15 +787,6 @@ async def async_setup_entry(
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_async_add_devices))
|
||||
_async_add_devices()
|
||||
|
||||
async_add_entities(
|
||||
MieleAuxSensor(aux_coordinator, device_id, definition.description)
|
||||
for device_id in aux_coordinator.data.filling_levels
|
||||
for definition in POLLED_SENSOR_TYPES
|
||||
if _enabled_aux_sensor(
|
||||
definition, aux_coordinator.data.filling_levels[device_id]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
APPLIANCE_ICONS = {
|
||||
MieleAppliance.WASHING_MACHINE: "mdi:washing-machine",
|
||||
@@ -970,32 +885,6 @@ class MieleRestorableSensor(MieleSensor, RestoreSensor):
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
|
||||
class MieleAuxSensor(MieleAuxEntity, SensorEntity):
|
||||
"""Representation of a filling level Sensor."""
|
||||
|
||||
entity_description: MieleSensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleAuxDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: MieleSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, device_id, description)
|
||||
if description.unique_id_fn is not None:
|
||||
self._attr_unique_id = description.unique_id_fn(device_id, description)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the state of the level sensor."""
|
||||
return (
|
||||
self.entity_description.value_fn(self.levels)
|
||||
if self.entity_description.value_fn is not None
|
||||
else None
|
||||
)
|
||||
|
||||
|
||||
class MielePlateSensor(MieleSensor):
|
||||
"""Representation of a Sensor."""
|
||||
|
||||
|
||||
@@ -257,9 +257,6 @@
|
||||
"plate_step_warm": "Warming"
|
||||
}
|
||||
},
|
||||
"power_disk_level": {
|
||||
"name": "PowerDisk level"
|
||||
},
|
||||
"program_id": {
|
||||
"name": "Program",
|
||||
"state": {
|
||||
@@ -1041,12 +1038,6 @@
|
||||
"remaining_time": {
|
||||
"name": "Remaining time"
|
||||
},
|
||||
"rinse_aid_level": {
|
||||
"name": "Rinse aid level"
|
||||
},
|
||||
"salt_level": {
|
||||
"name": "Salt level"
|
||||
},
|
||||
"spin_speed": {
|
||||
"name": "Spin speed"
|
||||
},
|
||||
@@ -1089,12 +1080,6 @@
|
||||
"temperature_zone_3": {
|
||||
"name": "Temperature zone 3"
|
||||
},
|
||||
"twin_dos_1_level": {
|
||||
"name": "TwinDos 1 level"
|
||||
},
|
||||
"twin_dos_2_level": {
|
||||
"name": "TwinDos 2 level"
|
||||
},
|
||||
"water_consumption": {
|
||||
"name": "Water consumption"
|
||||
},
|
||||
|
||||
@@ -117,7 +117,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the switch platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
|
||||
@@ -128,7 +128,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the vacuum platform."""
|
||||
coordinator = config_entry.runtime_data.coordinator
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
MieleVacuum(coordinator, device_id, definition.description)
|
||||
|
||||
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["nextdns"],
"quality_scale": "platinum",
"requirements": ["nextdns==5.0.0"]
"requirements": ["nextdns==4.1.0"]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["nibe==2.21.0"]
"requirements": ["nibe==2.20.0"]
}
|
||||
@@ -28,15 +28,9 @@
"exchange_rate": {
"default": "mdi:currency-usd"
},
"highest_price": {
"default": "mdi:cash-plus"
},
"last_price": {
"default": "mdi:cash"
},
"lowest_price": {
"default": "mdi:cash-minus"
},
"next_price": {
"default": "mdi:cash"
},
|
||||
@@ -1,30 +1 @@
|
||||
"""The OpenEVSE integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from openevsehttp.__main__ import OpenEVSE
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
|
||||
type OpenEVSEConfigEntry = ConfigEntry[OpenEVSE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:
|
||||
"""Set up openevse from a config entry."""
|
||||
|
||||
entry.runtime_data = OpenEVSE(entry.data[CONF_HOST])
|
||||
try:
|
||||
await entry.runtime_data.test_and_get()
|
||||
except TimeoutError as ex:
|
||||
raise ConfigEntryError("Unable to connect to charger") from ex
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR])
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, [Platform.SENSOR])
|
||||
"""The openevse component."""
|
||||
|
||||
@@ -1,114 +0,0 @@
|
||||
"""Config flow for OpenEVSE integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from openevsehttp.__main__ import OpenEVSE
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME
|
||||
from homeassistant.helpers.service_info import zeroconf
|
||||
|
||||
from .const import CONF_ID, CONF_SERIAL, DOMAIN
|
||||
|
||||
|
||||
class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""OpenEVSE config flow."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Set up the instance."""
|
||||
self.discovery_info: dict[str, Any] = {}
|
||||
|
||||
async def check_status(self, host: str) -> tuple[bool, str | None]:
|
||||
"""Check if we can connect to the OpenEVSE charger."""
|
||||
|
||||
charger = OpenEVSE(host)
|
||||
try:
|
||||
result = await charger.test_and_get()
|
||||
except TimeoutError:
|
||||
return False, None
|
||||
return True, result.get(CONF_SERIAL)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
if (result := await self.check_status(user_input[CONF_HOST]))[0]:
|
||||
if (serial := result[1]) is not None:
|
||||
await self.async_set_unique_id(serial, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"OpenEVSE {user_input[CONF_HOST]}",
|
||||
data=user_input,
|
||||
)
|
||||
errors = {CONF_HOST: "cannot_connect"}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_import(self, data: dict[str, str]) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: data[CONF_HOST]})
|
||||
|
||||
if (result := await self.check_status(data[CONF_HOST]))[0]:
|
||||
if (serial := result[1]) is not None:
|
||||
await self.async_set_unique_id(serial)
|
||||
self._abort_if_unique_id_configured()
|
||||
else:
|
||||
return self.async_abort(reason="unavailable_host")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=f"OpenEVSE {data[CONF_HOST]}",
|
||||
data=data,
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
self._async_abort_entries_match({CONF_HOST: discovery_info.host})
|
||||
|
||||
await self.async_set_unique_id(discovery_info.properties[CONF_ID])
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
|
||||
|
||||
host = discovery_info.host
|
||||
name = f"OpenEVSE {discovery_info.name.split('.')[0]}"
|
||||
self.discovery_info.update(
|
||||
{
|
||||
CONF_HOST: host,
|
||||
CONF_NAME: name,
|
||||
}
|
||||
)
|
||||
self.context.update({"title_placeholders": {"name": name}})
|
||||
|
||||
if not (await self.check_status(host))[0]:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm discovery."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
description_placeholders={"name": self.discovery_info[CONF_NAME]},
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.discovery_info[CONF_NAME],
|
||||
data={CONF_HOST: self.discovery_info[CONF_HOST]},
|
||||
)
|
||||
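The config flow above validates a host by calling the charger once, treating a TimeoutError as "cannot connect", and using the reported serial number as the unique ID. A self-contained sketch of that check, with a fake client standing in for the python-openevse-http OpenEVSE class:

import asyncio


class FakeCharger:
    """Stand-in for the OpenEVSE HTTP client; only the call shape matters here."""

    def __init__(self, host: str) -> None:
        self.host = host

    async def test_and_get(self) -> dict[str, str]:
        if self.host == "unreachable.invalid":
            raise TimeoutError
        return {"serial": "abc123"}


async def check_status(host: str) -> tuple[bool, str | None]:
    """Return (reachable, serial) like the flow's helper: timeouts become a soft failure."""
    charger = FakeCharger(host)
    try:
        result = await charger.test_and_get()
    except TimeoutError:
        return False, None
    return True, result.get("serial")


if __name__ == "__main__":
    print(asyncio.run(check_status("openevse.local")))       # (True, 'abc123')
    print(asyncio.run(check_status("unreachable.invalid")))  # (False, None)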
@@ -1,6 +0,0 @@
"""Constants for the OpenEVSE integration."""

CONF_ID = "id"
CONF_SERIAL = "serial"
DOMAIN = "openevse"
INTEGRATION_TITLE = "OpenEVSE"
@@ -1,14 +1,10 @@
{
"domain": "openevse",
"name": "OpenEVSE",
"after_dependencies": ["zeroconf"],
"codeowners": ["@c00w", "@firstof9"],
"config_flow": true,
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/openevse",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["openevsehttp"],
"loggers": ["openevsewifi"],
"quality_scale": "legacy",
"requirements": ["python-openevse-http==0.2.1"],
"zeroconf": ["_openevse._tcp.local."]
"requirements": ["openevsewifi==1.1.2"]
}
|
||||
@@ -4,18 +4,17 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from openevsehttp.__main__ import OpenEVSE
|
||||
import openevsewifi
|
||||
from requests import RequestException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_MONITORED_VARIABLES,
|
||||
@@ -24,17 +23,10 @@ from homeassistant.const import (
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import ConfigEntry
|
||||
from .const import DOMAIN, INTEGRATION_TITLE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
@@ -62,7 +54,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="rtc_temp",
|
||||
@@ -70,7 +61,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="usage_session",
|
||||
@@ -100,110 +90,54 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the openevse platform."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=config,
|
||||
)
|
||||
"""Set up the OpenEVSE sensor."""
|
||||
host = config[CONF_HOST]
|
||||
monitored_variables = config[CONF_MONITORED_VARIABLES]
|
||||
|
||||
if (
|
||||
result.get("type") is FlowResultType.ABORT
|
||||
and result.get("reason") != "already_configured"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result.get('reason')}",
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result.get('reason')}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
return
|
||||
charger = openevsewifi.Charger(host)
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
"deprecated_yaml",
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
entities = [
|
||||
OpenEVSESensor(charger, description)
|
||||
for description in SENSOR_TYPES
|
||||
if description.key in monitored_variables
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add sensors for passed config_entry in HA."""
|
||||
async_add_entities(
|
||||
(
|
||||
OpenEVSESensor(
|
||||
config_entry.data[CONF_HOST],
|
||||
config_entry.runtime_data,
|
||||
description,
|
||||
)
|
||||
for description in SENSOR_TYPES
|
||||
),
|
||||
True,
|
||||
)
|
||||
add_entities(entities, True)
|
||||
|
||||
|
||||
class OpenEVSESensor(SensorEntity):
|
||||
"""Implementation of an OpenEVSE sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
host: str,
|
||||
charger: OpenEVSE,
|
||||
description: SensorEntityDescription,
|
||||
) -> None:
|
||||
def __init__(self, charger, description: SensorEntityDescription) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_description = description
|
||||
self.host = host
|
||||
self.charger = charger
|
||||
|
||||
async def async_update(self) -> None:
|
||||
def update(self) -> None:
|
||||
"""Get the monitored data from the charger."""
|
||||
try:
|
||||
await self.charger.update()
|
||||
except TimeoutError:
|
||||
sensor_type = self.entity_description.key
|
||||
if sensor_type == "status":
|
||||
self._attr_native_value = self.charger.getStatus()
|
||||
elif sensor_type == "charge_time":
|
||||
self._attr_native_value = self.charger.getChargeTimeElapsed() / 60
|
||||
elif sensor_type == "ambient_temp":
|
||||
self._attr_native_value = self.charger.getAmbientTemperature()
|
||||
elif sensor_type == "ir_temp":
|
||||
self._attr_native_value = self.charger.getIRTemperature()
|
||||
elif sensor_type == "rtc_temp":
|
||||
self._attr_native_value = self.charger.getRTCTemperature()
|
||||
elif sensor_type == "usage_session":
|
||||
self._attr_native_value = float(self.charger.getUsageSession()) / 1000
|
||||
elif sensor_type == "usage_total":
|
||||
self._attr_native_value = float(self.charger.getUsageTotal()) / 1000
|
||||
else:
|
||||
self._attr_native_value = "Unknown"
|
||||
except (RequestException, ValueError, KeyError):
|
||||
_LOGGER.warning("Could not update status for %s", self.name)
|
||||
return
|
||||
|
||||
sensor_type = self.entity_description.key
|
||||
if sensor_type == "status":
|
||||
self._attr_native_value = self.charger.status
|
||||
elif sensor_type == "charge_time":
|
||||
self._attr_native_value = self.charger.charge_time_elapsed / 60
|
||||
elif sensor_type == "ambient_temp":
|
||||
self._attr_native_value = self.charger.ambient_temperature
|
||||
elif sensor_type == "ir_temp":
|
||||
self._attr_native_value = self.charger.ir_temperature
|
||||
elif sensor_type == "rtc_temp":
|
||||
self._attr_native_value = self.charger.rtc_temperature
|
||||
elif sensor_type == "usage_session":
|
||||
self._attr_native_value = float(self.charger.usage_session) / 1000
|
||||
elif sensor_type == "usage_total":
|
||||
self._attr_native_value = float(self.charger.usage_total) / 1000
|
||||
else:
|
||||
self._attr_native_value = "Unknown"
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "This charger is already configured",
|
||||
"unavailable_host": "Unable to connect to host"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Unable to connect"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Enter the IP address of your OpenEVSE. Should match the address you used to set it up."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"yaml_deprecated": {
|
||||
"description": "Configuring OpenEVSE using YAML is being removed. Your existing YAML configuration has been imported into the UI automatically. Remove the `openevse` configuration from your configuration.yaml file and restart Home Assistant to fix this issue.",
|
||||
"title": "OpenEVSE YAML configuration is deprecated"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -61,10 +61,7 @@ async def async_get_device_diagnostics(
|
||||
data["execution_history"] = [
|
||||
repr(execution)
|
||||
for execution in await client.get_execution_history()
|
||||
if any(
|
||||
command.device_url.split("#", 1)[0] == device_url.split("#", 1)[0]
|
||||
for command in execution.commands
|
||||
)
|
||||
if any(command.device_url == device_url for command in execution.commands)
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
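The diagnostics hunk above widens the execution-history filter from an exact device_url match to a comparison on the base URL before the "#" subsystem suffix. The comparison itself is plain string splitting; the URLs below are made up for illustration:

def same_device(device_url: str, command_url: str) -> bool:
    """Compare Overkiz device URLs on their base part, ignoring the '#<subsystem>' suffix."""
    return device_url.split("#", 1)[0] == command_url.split("#", 1)[0]


if __name__ == "__main__":
    print(same_device("io://1234-5678-9012/3276229#1", "io://1234-5678-9012/3276229#2"))  # True
    print(same_device("io://1234-5678-9012/3276229#1", "io://1234-5678-9012/9999999#1"))  # False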
@@ -13,7 +13,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
"requirements": ["pyoverkiz==1.19.4"],
"requirements": ["pyoverkiz==1.19.3"],
"zeroconf": [
{
"name": "gateway*",
|
||||
@@ -11,13 +11,5 @@
|
||||
"reload": {
|
||||
"service": "mdi:reload"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"entered_home": {
|
||||
"trigger": "mdi:account-arrow-left"
|
||||
},
|
||||
"left_home": {
|
||||
"trigger": "mdi:account-arrow-right"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted persons to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "[%key:component::person::title%]",
|
||||
@@ -29,42 +25,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload": {
|
||||
"description": "Reloads persons from the YAML-configuration.",
|
||||
"name": "[%key:common::action::reload%]"
|
||||
}
|
||||
},
|
||||
"title": "Person",
|
||||
"triggers": {
|
||||
"entered_home": {
|
||||
"description": "Triggers when one or more persons enter home.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::person::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::person::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Entered home"
|
||||
},
|
||||
"left_home": {
|
||||
"description": "Triggers when one or more persons leave home.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::person::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::person::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Left home"
|
||||
}
|
||||
}
|
||||
"title": "Person"
|
||||
}
|
||||
|
||||
@@ -1,21 +0,0 @@
"""Provides triggers for persons."""

from homeassistant.const import STATE_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
Trigger,
make_entity_origin_state_trigger,
make_entity_target_state_trigger,
)

from .const import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
"entered_home": make_entity_target_state_trigger(DOMAIN, STATE_HOME),
"left_home": make_entity_origin_state_trigger(DOMAIN, from_state=STATE_HOME),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for persons."""
return TRIGGERS
@@ -1,18 +0,0 @@
.trigger_common: &trigger_common
target:
entity:
domain: person
fields:
behavior:
required: true
default: any
selector:
select:
options:
- first
- last
- any
translation_key: trigger_behavior

entered_home: *trigger_common
left_home: *trigger_common
@@ -75,15 +75,6 @@
},
{
"macaddress": "84E657*"
},
{
"hostname": "ps5-*"
},
{
"hostname": "ps4-*"
},
{
"hostname": "ps3"
}
],
"documentation": "https://www.home-assistant.io/integrations/playstation_network",

@@ -114,72 +114,32 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)

await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
if not user_input:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
)

return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfigure to change the device host/IP for an existing entry."""
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="reconfigure",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display identical to other steps
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)

# Ensure new serial number matches the existing entry unique_id (serial number)
if serial_number != self._get_reconfigure_entry().unique_id:
return self.async_abort(reason="wrong_device")

# Update the existing config entry with the new host and schedule reload
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(), data_updates={CONF_HOST: host}
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should") or "",
}
if api_versions
else None,
)

return self.async_show_form(
step_id="reconfigure",
# Pre-fill with current host from the entry being reconfigured
data_schema=self.add_suggested_values_to_schema(
SCHEMA_DEVICE, self._get_reconfigure_entry().data
),
await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
)

@@ -1,34 +0,0 @@
"""Diagnostics support for Pooldose."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant

from . import PooldoseConfigEntry

TO_REDACT = {
"IP",
"MAC",
"WIFI_SSID",
"AP_SSID",
"SERIAL_NUMBER",
"DEVICE_ID",
"OWNERID",
"NAME",
"GROUPNAME",
}


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: PooldoseConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data

return {
"device_info": async_redact_data(coordinator.device_info, TO_REDACT),
"data": coordinator.data,
}
@@ -41,7 +41,7 @@ rules:

# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: done
discovery: done
docs-data-update: done
@@ -53,20 +53,20 @@ rules:
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: This integration does not support dynamic device discovery, as each config entry represents a single PoolDose device with all available entities.
comment: This integration does not support dynamic devices, as it is designed for a single PoolDose device.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: This integration does not have any identified cases where repair issues would be needed.
comment: This integration does not provide repair issues, as it is designed for a single PoolDose device with a fixed configuration.
stale-devices:
status: exempt
comment: This integration manages a single device per config entry, so stale device removal is not applicable.
comment: This integration does not support stale devices, as it is designed for a single PoolDose device with a fixed configuration.

# Platinum
async-dependency: done

@@ -4,9 +4,7 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_device_info": "Unable to retrieve device information",
"no_serial_number": "No serial number found on the device",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_device": "The provided device does not match the configured device"
"no_serial_number": "No serial number found on the device"
},
"error": {
"api_not_set": "API version not found in device response. Device firmware may not be compatible with this integration.",
@@ -22,14 +20,6 @@
"description": "A PoolDose device was found on your network at {ip} with MAC address {mac}.\n\nDo you want to add {name} to Home Assistant?",
"title": "Confirm DHCP discovered PoolDose device"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::pooldose::config::step::user::data_description::host%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

@@ -164,11 +164,7 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
return super().available and self.endpoint_id in self.coordinator.data

@property
def is_on(self) -> bool | None:

@@ -113,9 +113,7 @@ class PortainerButton(PortainerContainerEntity, ButtonEntity):
"""Trigger the Portainer button press service."""
try:
await self.entity_description.press_action(
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
self.coordinator.portainer, self.endpoint_id, self.device_id
)
except PortainerConnectionError as err:
raise HomeAssistantError(

@@ -50,7 +50,7 @@ class PortainerContainerData:
"""Container data held by the Portainer coordinator."""

container: DockerContainer
stats: DockerContainerStats | None
stats: DockerContainerStats
stats_pre: DockerContainerStats | None


@@ -147,52 +147,47 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
docker_version = await self.portainer.docker_version(endpoint.id)
docker_info = await self.portainer.docker_info(endpoint.id)

prev_endpoint = self.data.get(endpoint.id) if self.data else None
container_map: dict[str, PortainerContainerData] = {}

# Map containers, started and stopped
for container in containers:
container_name = self._get_container_name(container.names[0])
prev_container = (
prev_endpoint.containers[container_name]
if prev_endpoint
else None
container_stats_task = [
(
container,
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
),
)
container_map[container_name] = PortainerContainerData(
container=container,
stats=None,
stats_pre=prev_container.stats if prev_container else None,
)

# Separately fetch stats for running containers
running_containers = [
container
for container in containers
if container.state == CONTAINER_STATE_RUNNING
]
if running_containers:
container_stats = dict(
zip(
(
self._get_container_name(container.names[0])
for container in running_containers
),
await asyncio.gather(
*(
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
)
for container in running_containers
)
),
strict=False,
)
)

# Now assign stats to the containers
for container_name, stats in container_stats.items():
container_map[container_name].stats = stats
container_stats_gather = await asyncio.gather(
*[task for _, task in container_stats_task]
)
for (container, _), container_stats in zip(
container_stats_task, container_stats_gather, strict=False
):
container_name = container.names[0].replace("/", " ").strip()

# Store previous stats if available. This is used to calculate deltas for CPU and network usage
# In the first call it will be None, since it has nothing to compare with
# Added a walrus pattern to check if not None on prev_container, to keep mypy happy. :)
container_map[container_name] = PortainerContainerData(
container=container,
stats=container_stats,
stats_pre=(
prev_container.stats
if self.data
and (prev_data := self.data.get(endpoint.id)) is not None
and (
prev_container := prev_data.containers.get(
container_name
)
)
is not None
else None
),
)
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
@@ -233,15 +228,11 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD

# Surprise, we also handle containers here :)
current_containers = {
(endpoint.id, container_name)
(endpoint.id, container.container.id)
for endpoint in mapped_endpoints.values()
for container_name in endpoint.containers
for container in endpoint.containers.values()
}
new_containers = current_containers - self.known_containers
if new_containers:
_LOGGER.debug("New containers found: %s", new_containers)
self.known_containers.update(new_containers)

def _get_container_name(self, container_name: str) -> str:
"""Sanitize to get a proper container name."""
return container_name.replace("/", " ").strip()

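The rewritten update above only requests stats for running containers, issuing one request per container concurrently with asyncio.gather and zipping the results back onto the sanitized container names. A self-contained sketch of that gather-and-zip pattern, with a hypothetical fetch_stats coroutine standing in for the Portainer client call:

import asyncio


async def fetch_stats(name: str) -> dict[str, float]:
    # Stand-in for an HTTP call such as container_stats(...).
    await asyncio.sleep(0)
    return {"cpu": 0.0, "memory": 0.0}


async def stats_by_name(names: list[str]) -> dict[str, dict[str, float]]:
    # One coroutine per container, run concurrently; gather preserves input
    # order, so zip() maps each result back to its container name.
    results = await asyncio.gather(*(fetch_stats(name) for name in names))
    return dict(zip(names, results, strict=True))


# asyncio.run(stats_by_name(["web", "db"])) -> {"web": {...}, "db": {...}}
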
@@ -7,9 +7,6 @@
"architecture": {
"default": "mdi:cpu-64-bit"
},
"container_state": {
"default": "mdi:state-machine"
},
"containers_count": {
"default": "mdi:database"
},

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==1.0.22"]
"requirements": ["pyportainer==1.0.19"]
}

@@ -49,19 +49,10 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="image",
value_fn=lambda data: data.container.image,
),
PortainerContainerSensorEntityDescription(
key="container_state",
translation_key="container_state",
value_fn=lambda data: data.container.state,
device_class=SensorDeviceClass.ENUM,
options=["running", "exited", "paused", "restarting", "created", "dead"],
),
PortainerContainerSensorEntityDescription(
key="memory_limit",
translation_key="memory_limit",
value_fn=lambda data: (
data.stats.memory_stats.limit if data.stats is not None else 0
),
value_fn=lambda data: data.stats.memory_stats.limit,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -72,9 +63,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="memory_usage",
translation_key="memory_usage",
value_fn=lambda data: (
data.stats.memory_stats.usage if data.stats is not None else 0
),
value_fn=lambda data: data.stats.memory_stats.usage,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -87,9 +76,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="memory_usage_percentage",
value_fn=lambda data: (
(data.stats.memory_stats.usage / data.stats.memory_stats.limit) * 100.0
if data.stats is not None
and data.stats.memory_stats.limit > 0
and data.stats.memory_stats.usage > 0
if data.stats.memory_stats.limit > 0 and data.stats.memory_stats.usage > 0
else 0.0
),
native_unit_of_measurement=PERCENTAGE,
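The guarded expression above boils down to a simple ratio: memory percentage is usage divided by limit, times 100, with 0.0 returned whenever the stats are missing or zero. A standalone illustrative version (the entity computes this inline from DockerContainerStats):

def memory_percent(usage: int, limit: int) -> float:
    # Guard against absent or zero stats before dividing, as the sensor does.
    if limit > 0 and usage > 0:
        return (usage / limit) * 100.0
    return 0.0


# memory_percent(256 * 1024**2, 1024**3) == 25.0
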
@@ -102,8 +89,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="cpu_usage_total",
value_fn=lambda data: (
(total_delta / system_delta) * data.stats.cpu_stats.online_cpus * 100.0
if data.stats is not None
and (prev := data.stats_pre) is not None
if (prev := data.stats_pre) is not None
and (
system_delta := (
data.stats.cpu_stats.system_cpu_usage
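The CPU sensor in this hunk follows the usual Docker stats calculation: take the delta of the container's total CPU time and of the host's system CPU time between two samples, then scale by the number of online CPUs. A hedged, self-contained sketch of that formula with illustrative parameter names (the real value_fn reads these fields from the current and previous DockerContainerStats):

def cpu_percent(
    total_usage: int,
    prev_total_usage: int,
    system_usage: int,
    prev_system_usage: int,
    online_cpus: int,
) -> float:
    # Deltas between the current and the previous stats sample.
    total_delta = total_usage - prev_total_usage
    system_delta = system_usage - prev_system_usage
    if system_delta <= 0 or total_delta < 0:
        return 0.0
    return (total_delta / system_delta) * online_cpus * 100.0
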
@@ -261,6 +247,7 @@ async def async_setup_entry(
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SENSORS
if entity_description.value_fn(container) is not None
)

coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
@@ -303,11 +290,7 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
return super().available and self.endpoint_id in self.coordinator.data

@property
def native_value(self) -> StateType:

@@ -68,17 +68,6 @@
"architecture": {
"name": "Architecture"
},
"container_state": {
"name": "State",
"state": {
"created": "Created",
"dead": "Dead",
"exited": "Exited",
"paused": "Paused",
"restarting": "Restarting",
"running": "Running"
}
},
"containers_count": {
"name": "Container count"
},

@@ -137,19 +137,13 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Start (turn on) the container."""
await self.entity_description.turn_on_fn(
"start",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
"start", self.coordinator.portainer, self.endpoint_id, self.device_id
)
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Stop (turn off) the container."""
await self.entity_description.turn_off_fn(
"stop",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
"stop", self.coordinator.portainer, self.endpoint_id, self.device_id
)
await self.coordinator.async_request_refresh()

@@ -453,6 +453,10 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall)
# Imports deferred to avoid loading modules
# in memory since usually only one part of this
# integration is used at a time
if sys.version_info >= (3, 14):
raise HomeAssistantError(
"Memory profiling is not supported on Python 3.14. Please use Python 3.13."
)
from guppy import hpy  # noqa: PLC0415

start_time = int(time.time() * 1000000)

@@ -7,7 +7,7 @@
"quality_scale": "internal",
"requirements": [
"pyprof2calltree==1.4.5",
"guppy3==3.1.6",
"guppy3==3.1.5;python_version<'3.14'",
"objgraph==3.5.0"
],
"single_config_entry": true

@@ -8,7 +8,7 @@ from httpx import HTTPError, InvalidURL, TimeoutException
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_URL, CONF_VERIFY_SSL
from homeassistant.const import CONF_URL
from homeassistant.helpers.httpx_client import get_async_client

from .client import get_calendar
@@ -21,7 +21,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_CALENDAR_NAME): str,
vol.Required(CONF_URL): str,
vol.Required(CONF_VERIFY_SSL, default=True): bool,
}
)

@@ -49,7 +48,7 @@ class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN):
"webcal://", "https://", 1
)
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
client = get_async_client(self.hass, verify_ssl=user_input[CONF_VERIFY_SSL])
client = get_async_client(self.hass)
try:
res = await get_calendar(client, user_input[CONF_URL])
if res.status_code == HTTPStatus.FORBIDDEN:

@@ -7,7 +7,7 @@ from httpx import HTTPError, InvalidURL, TimeoutException
from ical.calendar import Calendar

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL, CONF_VERIFY_SSL
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -42,9 +42,7 @@ class RemoteCalendarDataUpdateCoordinator(DataUpdateCoordinator[Calendar]):
config_entry=config_entry,
always_update=True,
)
self._client = get_async_client(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
)
self._client = get_async_client(hass)
self._url = config_entry.data[CONF_URL]

async def _async_update_data(self) -> Calendar:

@@ -13,13 +13,11 @@
"user": {
"data": {
"calendar_name": "Calendar name",
"url": "Calendar URL",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
"url": "Calendar URL"
},
"data_description": {
"calendar_name": "The name of the calendar shown in the UI.",
"url": "The URL of the remote calendar.",
"verify_ssl": "Enable SSL certificate verification for secure connections."
"url": "The URL of the remote calendar."
},
"description": "Please choose a name for the calendar to be imported"
}

@@ -552,7 +552,6 @@ class RoborockB01Q7UpdateCoordinator(RoborockDataUpdateCoordinatorB01):
RoborockB01Props.CLEANING_TIME,
RoborockB01Props.REAL_CLEAN_TIME,
RoborockB01Props.HYPA,
RoborockB01Props.WIND,
]

async def _async_update_data(

@@ -3,7 +3,7 @@
import logging
from typing import Any

from roborock.data import RoborockStateCode, SCWindMapping, WorkStatusMapping
from roborock.data import RoborockStateCode
from roborock.exceptions import RoborockException
from roborock.roborock_typing import RoborockCommand
import voluptuous as vol
@@ -24,12 +24,8 @@ from .const import (
GET_VACUUM_CURRENT_POSITION_SERVICE_NAME,
SET_VACUUM_GOTO_POSITION_SERVICE_NAME,
)
from .coordinator import (
RoborockB01Q7UpdateCoordinator,
RoborockConfigEntry,
RoborockDataUpdateCoordinator,
)
from .entity import RoborockCoordinatedEntityB01, RoborockCoordinatedEntityV1
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
from .entity import RoborockCoordinatedEntityV1

_LOGGER = logging.getLogger(__name__)

@@ -61,20 +57,6 @@ STATE_CODE_TO_STATE = {
RoborockStateCode.device_offline: VacuumActivity.ERROR,  # "Device offline"
}

Q7_STATE_CODE_TO_STATE = {
WorkStatusMapping.SLEEPING: VacuumActivity.IDLE,
WorkStatusMapping.WAITING_FOR_ORDERS: VacuumActivity.IDLE,
WorkStatusMapping.PAUSED: VacuumActivity.PAUSED,
WorkStatusMapping.DOCKING: VacuumActivity.RETURNING,
WorkStatusMapping.CHARGING: VacuumActivity.DOCKED,
WorkStatusMapping.SWEEP_MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.SWEEP_MOPING_2: VacuumActivity.CLEANING,
WorkStatusMapping.MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.UPDATING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_CLEANING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_AIRDRYING: VacuumActivity.DOCKED,
}

PARALLEL_UPDATES = 0


@@ -87,11 +69,6 @@ async def async_setup_entry(
async_add_entities(
RoborockVacuum(coordinator) for coordinator in config_entry.runtime_data.v1
)
async_add_entities(
RoborockQ7Vacuum(coordinator)
for coordinator in config_entry.runtime_data.b01
if isinstance(coordinator, RoborockB01Q7UpdateCoordinator)
)
platform = entity_platform.async_get_current_platform()

platform.async_register_entity_service(
@@ -264,149 +241,3 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
"x": robot_position.x,
"y": robot_position.y,
}


class RoborockQ7Vacuum(RoborockCoordinatedEntityB01, StateVacuumEntity):
"""General Representation of a Roborock vacuum."""

_attr_icon = "mdi:robot-vacuum"
_attr_supported_features = (
VacuumEntityFeature.PAUSE
| VacuumEntityFeature.STOP
| VacuumEntityFeature.RETURN_HOME
| VacuumEntityFeature.FAN_SPEED
| VacuumEntityFeature.SEND_COMMAND
| VacuumEntityFeature.LOCATE
| VacuumEntityFeature.STATE
| VacuumEntityFeature.START
)
_attr_translation_key = DOMAIN
_attr_name = None
coordinator: RoborockB01Q7UpdateCoordinator

def __init__(
self,
coordinator: RoborockB01Q7UpdateCoordinator,
) -> None:
"""Initialize a vacuum."""
StateVacuumEntity.__init__(self)
RoborockCoordinatedEntityB01.__init__(
self,
coordinator.duid_slug,
coordinator,
)

@property
def fan_speed_list(self) -> list[str]:
"""Get the list of available fan speeds."""
return SCWindMapping.keys()

@property
def activity(self) -> VacuumActivity | None:
"""Return the status of the vacuum cleaner."""
if self.coordinator.data.status is not None:
return Q7_STATE_CODE_TO_STATE.get(self.coordinator.data.status)
return None

@property
def fan_speed(self) -> str | None:
"""Return the fan speed of the vacuum cleaner."""
return self.coordinator.data.wind_name

async def async_start(self) -> None:
"""Start the vacuum."""
try:
await self.coordinator.api.start_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "start_clean",
},
) from err

async def async_pause(self) -> None:
"""Pause the vacuum."""
try:
await self.coordinator.api.pause_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "pause_clean",
},
) from err

async def async_stop(self, **kwargs: Any) -> None:
"""Stop the vacuum."""
try:
await self.coordinator.api.stop_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "stop_clean",
},
) from err

async def async_return_to_base(self, **kwargs: Any) -> None:
"""Send vacuum back to base."""
try:
await self.coordinator.api.return_to_dock()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "return_to_dock",
},
) from err

async def async_locate(self, **kwargs: Any) -> None:
"""Locate vacuum."""
try:
await self.coordinator.api.find_me()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "find_me",
},
) from err

async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
"""Set vacuum fan speed."""
try:
await self.coordinator.api.set_fan_speed(
SCWindMapping.from_value(fan_speed)
)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "set_fan_speed",
},
) from err

async def async_send_command(
self,
command: str,
params: dict[str, Any] | list[Any] | None = None,
**kwargs: Any,
) -> None:
"""Send a command to a vacuum cleaner."""
try:
await self.coordinator.api.send(command, params)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": command,
},
) from err

@@ -16,5 +16,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/ruuvitag_ble",
"iot_class": "local_push",
"requirements": ["ruuvitag-ble==0.4.0"]
"requirements": ["ruuvitag-ble==0.3.0"]
}

@@ -40,7 +40,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.7.2",
"wakeonlan==3.1.0",
"async-upnp-client==0.46.2"
"async-upnp-client==0.46.1"
],
"ssdp": [
{

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pysaunum"],
"quality_scale": "gold",
"quality_scale": "silver",
"requirements": ["pysaunum==0.1.0"]
}
Some files were not shown because too many files have changed in this diff.