Mirror of https://github.com/home-assistant/core.git (synced 2025-10-27 12:39:39 +00:00)

Compare commits: copilot/mo...llm_device (10 commits)
| SHA1 |
|---|
| f45b8eb0bc |
| 61b06c5cee |
| d599524880 |
| 7c6c6ff7ff |
| 4b343c10a5 |
| 5532570dae |
| 5bd912c730 |
| ad9efd6429 |
| 3b59a03dfa |
| 78bf54de42 |
@@ -33,7 +33,7 @@
"GitHub.vscode-pull-request-github",
"GitHub.copilot"
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.jsonc
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.experiments.optOutFrom": ["pythonTestAdapter"],
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
@@ -63,9 +63,6 @@
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"[json][jsonc][yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"json.schemas": [
{
"fileMatch": ["homeassistant/components/*/manifest.json"],

.gitignore (vendored): 2 changed lines
@@ -111,7 +111,6 @@ virtualization/vagrant/config
!.vscode/cSpell.json
!.vscode/extensions.json
!.vscode/tasks.json
!.vscode/settings.default.jsonc
.env

# Windows Explorer
@@ -141,5 +140,4 @@ pytest_buckets.txt

# AI tooling
.claude/settings.local.json
.serena/
@@ -9,17 +9,13 @@
"pylint.importStrategy": "fromEnvironment",
// Pyright is too pedantic for Home Assistant
"python.analysis.typeCheckingMode": "basic",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"[json][jsonc][yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"json.schemas": [
{
"fileMatch": ["homeassistant/components/*/manifest.json"],
// This value differs between working with devcontainer and locally, therefore this value should NOT be in sync!
"url": "./script/json_schemas/manifest_schema.json"
}
]
{
"fileMatch": [
"homeassistant/components/*/manifest.json"
],
// This value differs between working with devcontainer and locally, therefor this value should NOT be in sync!
"url": "./script/json_schemas/manifest_schema.json"
}
]
}
CODEOWNERS (generated): 2 changed lines
@@ -494,8 +494,6 @@ build.json @home-assistant/supervisor
/tests/components/filesize/ @gjohansson-ST
/homeassistant/components/filter/ @dgomes
/tests/components/filter/ @dgomes
/homeassistant/components/fing/ @Lorenzo-Gasparini
/tests/components/fing/ @Lorenzo-Gasparini
/homeassistant/components/firefly_iii/ @erwindouna
/tests/components/firefly_iii/ @erwindouna
/homeassistant/components/fireservicerota/ @cyberjunky

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==1.0.2"]
"requirements": ["aioairzone==1.0.1"]
}

@@ -816,20 +816,13 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
return MediaPlayerState.PAUSED
if media_status.player_is_idle:
return MediaPlayerState.IDLE

if self._chromecast is not None and self._chromecast.is_idle:
# If library consider us idle, that is our off state
# it takes HDMI status into account for cast devices.
return MediaPlayerState.OFF

if self.app_id in APP_IDS_UNRELIABLE_MEDIA_INFO:
# Some apps don't report media status, show the player as playing
return MediaPlayerState.PLAYING

if self.app_id is not None:
# We have an active app
if self.app_id is not None and self.app_id != pychromecast.IDLE_APP_ID:
if self.app_id in APP_IDS_UNRELIABLE_MEDIA_INFO:
# Some apps don't report media status, show the player as playing
return MediaPlayerState.PLAYING
return MediaPlayerState.IDLE

if self._chromecast is not None and self._chromecast.is_idle:
return MediaPlayerState.OFF
return None

@property

@@ -5,7 +5,7 @@ from aiocomelit.const import BRIDGE
from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant

from .const import CONF_VEDO_PIN, DEFAULT_PORT
from .const import DEFAULT_PORT
from .coordinator import (
ComelitBaseCoordinator,
ComelitConfigEntry,
@@ -43,13 +43,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
entry.data[CONF_HOST],
entry.data.get(CONF_PORT, DEFAULT_PORT),
entry.data[CONF_PIN],
entry.data.get(CONF_VEDO_PIN),
session,
)
platforms = list(BRIDGE_PLATFORMS)
# Add VEDO platforms if vedo_pin is configured
if entry.data.get(CONF_VEDO_PIN):
platforms.extend(VEDO_PLATFORMS)
platforms = BRIDGE_PLATFORMS
else:
coordinator = ComelitVedoSystem(
hass,
@@ -74,10 +70,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ComelitConfigEntry) ->
"""Unload a config entry."""

if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
platforms = list(BRIDGE_PLATFORMS)
# Add VEDO platforms if vedo_pin was configured
if entry.data.get(CONF_VEDO_PIN):
platforms.extend(VEDO_PLATFORMS)
platforms = BRIDGE_PLATFORMS
else:
platforms = VEDO_PLATFORMS

@@ -6,7 +6,7 @@ import logging
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit.api import ComelitVedoAreaObject
|
||||
from aiocomelit.const import BRIDGE, AlarmAreaState
|
||||
from aiocomelit.const import AlarmAreaState
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
@@ -14,13 +14,11 @@ from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelState,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.const import CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .utils import DeviceType, alarm_device_listener
|
||||
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -58,34 +56,12 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Comelit VEDO system alarm control panel devices."""
|
||||
|
||||
if config_entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
# Only setup if bridge has VEDO alarm enabled
|
||||
if not coordinator.vedo_pin:
|
||||
return
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitBridgeAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in (coordinator.alarm_data or {})
|
||||
.get("alarm_areas", {})
|
||||
.values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
alarm_device_listener(coordinator, _add_new_entities, "alarm_areas")
|
||||
)
|
||||
else:
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
async_add_entities(
|
||||
ComelitAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data["alarm_areas"].values()
|
||||
)
|
||||
async_add_entities(
|
||||
ComelitAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data["alarm_areas"].values()
|
||||
)
|
||||
|
||||
|
||||
class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanelEntity):
|
||||
@@ -195,133 +171,3 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel
|
||||
await self._async_update_state(
|
||||
AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[NIGHT]
|
||||
)
|
||||
|
||||
|
||||
class ComelitBridgeAlarmEntity(
|
||||
CoordinatorEntity[ComelitSerialBridge], AlarmControlPanelEntity
|
||||
):
|
||||
"""Representation of a VEDO alarm panel on a Serial Bridge."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_code_format = CodeFormat.NUMBER
|
||||
_attr_code_arm_required = False
|
||||
_attr_supported_features = (
|
||||
AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
| AlarmControlPanelEntityFeature.ARM_HOME
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitSerialBridge,
|
||||
area: ComelitVedoAreaObject,
|
||||
config_entry_entry_id: str,
|
||||
) -> None:
|
||||
"""Initialize the alarm panel."""
|
||||
self._area_index = area.index
|
||||
super().__init__(coordinator)
|
||||
# Use config_entry.entry_id as base for unique_id
|
||||
# because no serial number or mac is available
|
||||
self._attr_unique_id = f"{config_entry_entry_id}-{area.index}"
|
||||
self._attr_device_info = coordinator.platform_device_info(area, "area")
|
||||
if area.p2:
|
||||
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
@property
|
||||
def _area(self) -> ComelitVedoAreaObject:
|
||||
"""Return area object."""
|
||||
if self.coordinator.alarm_data:
|
||||
return self.coordinator.alarm_data["alarm_areas"][self._area_index]
|
||||
# Return a default area object if no alarm data
|
||||
return ComelitVedoAreaObject(
|
||||
index=self._area_index,
|
||||
name="Unknown",
|
||||
p1=False,
|
||||
p2=False,
|
||||
ready=False,
|
||||
armed=0,
|
||||
alarm=False,
|
||||
alarm_memory=False,
|
||||
sabotage=False,
|
||||
anomaly=False,
|
||||
in_time=False,
|
||||
out_time=False,
|
||||
human_status=AlarmAreaState.UNKNOWN,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if alarm is available."""
|
||||
if not self.coordinator.alarm_data:
|
||||
return False
|
||||
if self._area.human_status in [AlarmAreaState.ANOMALY, AlarmAreaState.UNKNOWN]:
|
||||
return False
|
||||
return super().available
|
||||
|
||||
@property
|
||||
def alarm_state(self) -> AlarmControlPanelState | None:
|
||||
"""Return the state of the alarm."""
|
||||
|
||||
_LOGGER.debug(
|
||||
"Area %s status is: %s. Armed is %s",
|
||||
self._area.name,
|
||||
self._area.human_status,
|
||||
self._area.armed,
|
||||
)
|
||||
if self._area.human_status == AlarmAreaState.ARMED:
|
||||
if self._area.armed == ALARM_AREA_ARMED_STATUS[AWAY]:
|
||||
return AlarmControlPanelState.ARMED_AWAY
|
||||
if self._area.armed == ALARM_AREA_ARMED_STATUS[NIGHT]:
|
||||
return AlarmControlPanelState.ARMED_NIGHT
|
||||
return AlarmControlPanelState.ARMED_HOME
|
||||
|
||||
return {
|
||||
AlarmAreaState.DISARMED: AlarmControlPanelState.DISARMED,
|
||||
AlarmAreaState.ENTRY_DELAY: AlarmControlPanelState.DISARMING,
|
||||
AlarmAreaState.EXIT_DELAY: AlarmControlPanelState.ARMING,
|
||||
AlarmAreaState.TRIGGERED: AlarmControlPanelState.TRIGGERED,
|
||||
}.get(self._area.human_status)
|
||||
|
||||
async def _async_update_state(self, area_state: AlarmAreaState, armed: int) -> None:
|
||||
"""Update state after action."""
|
||||
self._area.human_status = area_state
|
||||
self._area.armed = armed
|
||||
await self.async_update_ha_state()
|
||||
|
||||
async def async_alarm_disarm(self, code: str | None = None) -> None:
|
||||
"""Send disarm command."""
|
||||
if code != str(self.coordinator.vedo_pin):
|
||||
return
|
||||
await self.coordinator.api.set_zone_status(
|
||||
self._area.index, ALARM_ACTIONS[DISABLE]
|
||||
)
|
||||
await self._async_update_state(
|
||||
AlarmAreaState.DISARMED, ALARM_AREA_ARMED_STATUS[DISABLE]
|
||||
)
|
||||
|
||||
async def async_alarm_arm_away(self, code: str | None = None) -> None:
|
||||
"""Send arm away command."""
|
||||
await self.coordinator.api.set_zone_status(
|
||||
self._area.index, ALARM_ACTIONS[AWAY]
|
||||
)
|
||||
await self._async_update_state(
|
||||
AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[AWAY]
|
||||
)
|
||||
|
||||
async def async_alarm_arm_home(self, code: str | None = None) -> None:
|
||||
"""Send arm home command."""
|
||||
await self.coordinator.api.set_zone_status(
|
||||
self._area.index, ALARM_ACTIONS[HOME]
|
||||
)
|
||||
await self._async_update_state(
|
||||
AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[HOME_P1]
|
||||
)
|
||||
|
||||
async def async_alarm_arm_night(self, code: str | None = None) -> None:
|
||||
"""Send arm night command."""
|
||||
await self.coordinator.api.set_zone_status(
|
||||
self._area.index, ALARM_ACTIONS[NIGHT]
|
||||
)
|
||||
await self._async_update_state(
|
||||
AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[NIGHT]
|
||||
)
|
||||
|
||||
@@ -5,19 +5,17 @@ from __future__ import annotations
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit import ComelitVedoZoneObject
|
||||
from aiocomelit.const import BRIDGE
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .utils import DeviceType, alarm_device_listener, new_device_listener
|
||||
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
||||
from .utils import DeviceType, new_device_listener
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -30,47 +28,21 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Comelit VEDO presence sensors."""
|
||||
|
||||
if config_entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
# Only setup if bridge has VEDO alarm enabled
|
||||
if not coordinator.vedo_pin:
|
||||
return
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitVedoBridgeBinarySensorEntity(
|
||||
coordinator, device, config_entry.entry_id
|
||||
)
|
||||
for device in (coordinator.alarm_data or {})
|
||||
.get("alarm_zones", {})
|
||||
.values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
alarm_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
)
|
||||
else:
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitVedoBinarySensorEntity(
|
||||
coordinator, device, config_entry.entry_id
|
||||
)
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
)
|
||||
|
||||
|
||||
class ComelitVedoBinarySensorEntity(
|
||||
@@ -101,41 +73,3 @@ class ComelitVedoBinarySensorEntity(
|
||||
return (
|
||||
self.coordinator.data["alarm_zones"][self._zone_index].status_api == "0001"
|
||||
)
|
||||
|
||||
|
||||
class ComelitVedoBridgeBinarySensorEntity(
|
||||
CoordinatorEntity[ComelitSerialBridge], BinarySensorEntity
|
||||
):
|
||||
"""VEDO sensor device on a Serial Bridge."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_device_class = BinarySensorDeviceClass.MOTION
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitSerialBridge,
|
||||
zone: ComelitVedoZoneObject,
|
||||
config_entry_entry_id: str,
|
||||
) -> None:
|
||||
"""Init sensor entity."""
|
||||
self._zone_index = zone.index
|
||||
super().__init__(coordinator)
|
||||
# Use config_entry.entry_id as base for unique_id
|
||||
# because no serial number or mac is available
|
||||
self._attr_unique_id = f"{config_entry_entry_id}-presence-{zone.index}"
|
||||
self._attr_device_info = coordinator.platform_device_info(zone, "zone")
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Sensor availability."""
|
||||
return self.coordinator.alarm_data is not None
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Presence detected."""
|
||||
if not self.coordinator.alarm_data:
|
||||
return False
|
||||
return (
|
||||
self.coordinator.alarm_data["alarm_zones"][self._zone_index].status_api
|
||||
== "0001"
|
||||
)
|
||||
|
||||
@@ -22,7 +22,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import _LOGGER, CONF_VEDO_PIN, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
|
||||
from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
|
||||
from .utils import async_client_session
|
||||
|
||||
DEFAULT_HOST = "192.168.1.252"
|
||||
@@ -34,7 +34,6 @@ USER_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
vol.Optional(CONF_VEDO_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
|
||||
@@ -43,7 +42,6 @@ STEP_RECONFIGURE = vol.Schema(
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
|
||||
vol.Optional(CONF_VEDO_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -81,27 +79,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
finally:
|
||||
await api.logout()
|
||||
|
||||
# Validate VEDO PIN if provided and device type is BRIDGE
|
||||
if data.get(CONF_VEDO_PIN) and data.get(CONF_TYPE, BRIDGE) == BRIDGE:
|
||||
if not re.fullmatch(r"[0-9]{4,10}", data[CONF_VEDO_PIN]):
|
||||
raise InvalidVedoPin
|
||||
|
||||
# Verify VEDO is enabled with the provided PIN
|
||||
try:
|
||||
if not await api.vedo_enabled(data[CONF_VEDO_PIN]):
|
||||
raise InvalidVedoAuth
|
||||
except (aiocomelit_exceptions.CannotConnect, TimeoutError) as err:
|
||||
raise CannotConnect(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except aiocomelit_exceptions.CannotAuthenticate:
|
||||
raise InvalidVedoAuth(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_vedo_auth",
|
||||
) from None
|
||||
|
||||
return {"title": data[CONF_HOST]}
|
||||
|
||||
|
||||
@@ -129,10 +106,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except InvalidVedoPin:
|
||||
errors["base"] = "invalid_vedo_pin"
|
||||
except InvalidVedoAuth:
|
||||
errors["base"] = "invalid_vedo_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
@@ -214,38 +187,19 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
data_to_validate = {
|
||||
CONF_HOST: updated_host,
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
|
||||
}
|
||||
if CONF_VEDO_PIN in user_input:
|
||||
data_to_validate[CONF_VEDO_PIN] = user_input[CONF_VEDO_PIN]
|
||||
await validate_input(self.hass, data_to_validate)
|
||||
await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except InvalidVedoPin:
|
||||
errors["base"] = "invalid_vedo_pin"
|
||||
except InvalidVedoAuth:
|
||||
errors["base"] = "invalid_vedo_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
data_updates = {
|
||||
CONF_HOST: updated_host,
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
}
|
||||
if CONF_VEDO_PIN in user_input:
|
||||
data_updates[CONF_VEDO_PIN] = user_input[CONF_VEDO_PIN]
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates=data_updates
|
||||
reconfigure_entry, data_updates={CONF_HOST: updated_host}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
@@ -265,11 +219,3 @@ class InvalidAuth(HomeAssistantError):
|
||||
|
||||
class InvalidPin(HomeAssistantError):
|
||||
"""Error to indicate an invalid pin."""
|
||||
|
||||
|
||||
class InvalidVedoPin(HomeAssistantError):
|
||||
"""Error to indicate an invalid VEDO pin."""
|
||||
|
||||
|
||||
class InvalidVedoAuth(HomeAssistantError):
|
||||
"""Error to indicate VEDO authentication failed."""
|
||||
|
||||
@@ -9,7 +9,6 @@ _LOGGER = logging.getLogger(__package__)
DOMAIN = "comelit"
DEFAULT_PORT = 80
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
CONF_VEDO_PIN = "vedo_pin"

SCAN_INTERVAL = 5

@@ -154,8 +154,6 @@ class ComelitSerialBridge(
|
||||
|
||||
_hw_version = "20003101"
|
||||
api: ComeliteSerialBridgeApi
|
||||
vedo_pin: str | None
|
||||
alarm_data: AlarmDataObject | None = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -164,49 +162,25 @@ class ComelitSerialBridge(
|
||||
host: str,
|
||||
port: int,
|
||||
pin: str,
|
||||
vedo_pin: str | None,
|
||||
session: ClientSession,
|
||||
) -> None:
|
||||
"""Initialize the scanner."""
|
||||
self.api = ComeliteSerialBridgeApi(host, port, pin, session)
|
||||
self.vedo_pin = vedo_pin
|
||||
super().__init__(hass, entry, BRIDGE, host)
|
||||
|
||||
async def _async_update_system_data(
|
||||
self,
|
||||
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
|
||||
"""Specific method for updating data."""
|
||||
devices = await self.api.get_all_devices()
|
||||
data = await self.api.get_all_devices()
|
||||
|
||||
if self.data:
|
||||
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[dev_type], devices[dev_type], dev_type
|
||||
self.data[dev_type], data[dev_type], dev_type
|
||||
)
|
||||
|
||||
# Get VEDO alarm data if vedo_pin is configured
|
||||
if self.vedo_pin:
|
||||
try:
|
||||
if await self.api.vedo_enabled(self.vedo_pin):
|
||||
self.alarm_data = await self.api.get_all_areas_and_zones()
|
||||
|
||||
# Remove stale alarm devices
|
||||
if self.alarm_data:
|
||||
previous_alarm_data = getattr(
|
||||
self, "_previous_alarm_data", None
|
||||
)
|
||||
if previous_alarm_data:
|
||||
for obj_type in ("alarm_areas", "alarm_zones"):
|
||||
await self._async_remove_stale_devices(
|
||||
previous_alarm_data[obj_type],
|
||||
self.alarm_data[obj_type],
|
||||
"area" if obj_type == "alarm_areas" else "zone",
|
||||
)
|
||||
self._previous_alarm_data = self.alarm_data
|
||||
except (CannotAuthenticate, CannotConnect, CannotRetrieveData):
|
||||
_LOGGER.warning("Failed to retrieve VEDO alarm data")
|
||||
|
||||
return devices
|
||||
return data
|
||||
|
||||
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
|
||||
@@ -20,7 +20,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import DeviceType, alarm_device_listener, new_device_listener
|
||||
from .utils import DeviceType, new_device_listener
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -83,30 +83,6 @@ async def async_setup_bridge_entry(
|
||||
new_device_listener(coordinator, _add_new_entities, OTHER)
|
||||
)
|
||||
|
||||
# Add VEDO sensors if bridge has alarm data
|
||||
if coordinator.vedo_pin:
|
||||
|
||||
def _add_new_alarm_entities(
|
||||
new_devices: list[DeviceType], dev_type: str
|
||||
) -> None:
|
||||
"""Add entities for new alarm zones."""
|
||||
entities = [
|
||||
ComelitVedoBridgeSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
)
|
||||
for sensor_desc in SENSOR_VEDO_TYPES
|
||||
for device in (coordinator.alarm_data or {})
|
||||
.get("alarm_zones", {})
|
||||
.values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
alarm_device_listener(coordinator, _add_new_alarm_entities, "alarm_zones")
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_vedo_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -203,58 +179,3 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity
|
||||
return None
|
||||
|
||||
return cast(str, status.value)
|
||||
|
||||
|
||||
class ComelitVedoBridgeSensorEntity(
|
||||
CoordinatorEntity[ComelitSerialBridge], SensorEntity
|
||||
):
|
||||
"""VEDO sensor device on a Serial Bridge."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitSerialBridge,
|
||||
zone: ComelitVedoZoneObject,
|
||||
config_entry_entry_id: str,
|
||||
description: SensorEntityDescription,
|
||||
) -> None:
|
||||
"""Init sensor entity."""
|
||||
self._zone_index = zone.index
|
||||
super().__init__(coordinator)
|
||||
# Use config_entry.entry_id as base for unique_id
|
||||
# because no serial number or mac is available
|
||||
self._attr_unique_id = f"{config_entry_entry_id}-{zone.index}"
|
||||
self._attr_device_info = coordinator.platform_device_info(zone, "zone")
|
||||
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def _zone_object(self) -> ComelitVedoZoneObject:
|
||||
"""Zone object."""
|
||||
if self.coordinator.alarm_data:
|
||||
return self.coordinator.alarm_data["alarm_zones"][self._zone_index]
|
||||
# Return a default zone object if no alarm data
|
||||
return ComelitVedoZoneObject(
|
||||
index=self._zone_index,
|
||||
name="Unknown",
|
||||
status_api="0x000",
|
||||
status=0,
|
||||
human_status=AlarmZoneState.UNAVAILABLE,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Sensor availability."""
|
||||
return (
|
||||
self.coordinator.alarm_data is not None
|
||||
and self._zone_object.human_status != AlarmZoneState.UNAVAILABLE
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Sensor value."""
|
||||
if (status := self._zone_object.human_status) == AlarmZoneState.UNKNOWN:
|
||||
return None
|
||||
|
||||
return cast(str, status.value)
|
||||
|
||||
@@ -15,29 +15,25 @@
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"pin": "[%key:common::config_flow::data::pin%]",
|
||||
"type": "Device type",
|
||||
"vedo_pin": "VEDO alarm PIN (optional)"
|
||||
"type": "Device type"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Comelit device.",
|
||||
"port": "The port of your Comelit device.",
|
||||
"pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
|
||||
"type": "The type of your Comelit device.",
|
||||
"vedo_pin": "Optional PIN for VEDO alarm system on Serial Bridge devices. Leave empty if you don't have VEDO alarm enabled."
|
||||
"type": "The type of your Comelit device."
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"pin": "[%key:common::config_flow::data::pin%]",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data::vedo_pin%]"
|
||||
"pin": "[%key:common::config_flow::data::pin%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::comelit::config::step::user::data_description::host%]",
|
||||
"port": "[%key:component::comelit::config::step::user::data_description::port%]",
|
||||
"pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data_description::vedo_pin%]"
|
||||
"pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -48,16 +44,12 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
|
||||
"invalid_vedo_pin": "The provided VEDO PIN is invalid. It must be a 4-10 digit number.",
|
||||
"invalid_vedo_auth": "The provided VEDO PIN is incorrect or VEDO alarm is not enabled on this device.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
|
||||
"invalid_vedo_pin": "[%key:component::comelit::config::abort::invalid_vedo_pin%]",
|
||||
"invalid_vedo_auth": "[%key:component::comelit::config::abort::invalid_vedo_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -158,35 +158,3 @@ def new_device_listener(
|
||||
_check_devices()
|
||||
|
||||
return coordinator.async_add_listener(_check_devices)
|
||||
|
||||
|
||||
def alarm_device_listener(
|
||||
coordinator: ComelitBaseCoordinator,
|
||||
new_devices_callback: Callable[
|
||||
[list[ComelitVedoAreaObject | ComelitVedoZoneObject], str],
|
||||
None,
|
||||
],
|
||||
data_type: str,
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to coordinator updates to check for new alarm devices on bridge."""
|
||||
known_devices: dict[str, list[int]] = {}
|
||||
|
||||
def _check_alarm_devices() -> None:
|
||||
"""Check for new alarm devices and call callback with any new devices."""
|
||||
# For ComelitSerialBridge with alarm_data
|
||||
if not hasattr(coordinator, "alarm_data") or not coordinator.alarm_data:
|
||||
return
|
||||
|
||||
new_devices: list[ComelitVedoAreaObject | ComelitVedoZoneObject] = []
|
||||
for _id in coordinator.alarm_data[data_type]:
|
||||
if _id not in (id_list := known_devices.get(data_type, [])):
|
||||
known_devices.update({data_type: [*id_list, _id]})
|
||||
new_devices.append(coordinator.alarm_data[data_type][_id])
|
||||
|
||||
if new_devices:
|
||||
new_devices_callback(new_devices, data_type)
|
||||
|
||||
# Check for devices immediately
|
||||
_check_alarm_devices()
|
||||
|
||||
return coordinator.async_add_listener(_check_alarm_devices)
|
||||
|
||||
@@ -34,7 +34,7 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.CLIMATE, Platform.LIGHT, Platform.MEDIA_PLAYER]
PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER]


@dataclass

@@ -1,301 +0,0 @@
|
||||
"""Platform for Control4 Climate/Thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyControl4.climate import C4Climate
|
||||
from pyControl4.error_handling import C4Exception
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from . import Control4ConfigEntry, Control4RuntimeData, get_items_of_category
|
||||
from .const import CONTROL4_ENTITY_TYPE
|
||||
from .director_utils import update_variables_for_config_entry
|
||||
from .entity import Control4Entity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONTROL4_CATEGORY = "comfort"
|
||||
|
||||
# Control4 variable names
|
||||
CONTROL4_HVAC_STATE = "HVAC_STATE"
|
||||
CONTROL4_HVAC_MODE = "HVAC_MODE"
|
||||
CONTROL4_CURRENT_TEMPERATURE = "TEMPERATURE_F"
|
||||
CONTROL4_HUMIDITY = "HUMIDITY"
|
||||
CONTROL4_COOL_SETPOINT = "COOL_SETPOINT_F"
|
||||
CONTROL4_HEAT_SETPOINT = "HEAT_SETPOINT_F"
|
||||
|
||||
VARIABLES_OF_INTEREST = {
|
||||
CONTROL4_HVAC_STATE,
|
||||
CONTROL4_HVAC_MODE,
|
||||
CONTROL4_CURRENT_TEMPERATURE,
|
||||
CONTROL4_HUMIDITY,
|
||||
CONTROL4_COOL_SETPOINT,
|
||||
CONTROL4_HEAT_SETPOINT,
|
||||
}
|
||||
|
||||
# Map Control4 HVAC modes to Home Assistant
|
||||
C4_TO_HA_HVAC_MODE = {
|
||||
"Off": HVACMode.OFF,
|
||||
"Cool": HVACMode.COOL,
|
||||
"Heat": HVACMode.HEAT,
|
||||
"Auto": HVACMode.HEAT_COOL,
|
||||
}
|
||||
|
||||
HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}
|
||||
|
||||
# Map Control4 HVAC state to Home Assistant HVAC action
|
||||
C4_TO_HA_HVAC_ACTION = {
|
||||
"heating": HVACAction.HEATING,
|
||||
"cooling": HVACAction.COOLING,
|
||||
"idle": HVACAction.IDLE,
|
||||
"off": HVACAction.OFF,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Control4ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Control4 thermostats from a config entry."""
|
||||
runtime_data = entry.runtime_data
|
||||
|
||||
async def async_update_data() -> dict[int, dict[str, Any]]:
|
||||
"""Fetch data from Control4 director for thermostats."""
|
||||
try:
|
||||
return await update_variables_for_config_entry(
|
||||
hass, entry, VARIABLES_OF_INTEREST
|
||||
)
|
||||
except C4Exception as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
|
||||
coordinator = DataUpdateCoordinator[dict[int, dict[str, Any]]](
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="climate",
|
||||
update_method=async_update_data,
|
||||
update_interval=timedelta(seconds=runtime_data.scan_interval),
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
await coordinator.async_refresh()
|
||||
|
||||
items_of_category = await get_items_of_category(hass, entry, CONTROL4_CATEGORY)
|
||||
entity_list = []
|
||||
for item in items_of_category:
|
||||
try:
|
||||
if item["type"] == CONTROL4_ENTITY_TYPE:
|
||||
item_name = item["name"]
|
||||
item_id = item["id"]
|
||||
item_parent_id = item["parentId"]
|
||||
item_manufacturer = None
|
||||
item_device_name = None
|
||||
item_model = None
|
||||
|
||||
for parent_item in items_of_category:
|
||||
if parent_item["id"] == item_parent_id:
|
||||
item_manufacturer = parent_item.get("manufacturer")
|
||||
item_device_name = parent_item.get("roomName")
|
||||
item_model = parent_item.get("model")
|
||||
else:
|
||||
continue
|
||||
except KeyError:
|
||||
_LOGGER.exception(
|
||||
"Unknown device properties received from Control4: %s",
|
||||
item,
|
||||
)
|
||||
continue
|
||||
|
||||
# Skip if we don't have data for this thermostat
|
||||
if item_id not in coordinator.data:
|
||||
_LOGGER.warning(
|
||||
"Couldn't get climate state data for %s (ID: %s), skipping setup",
|
||||
item_name,
|
||||
item_id,
|
||||
)
|
||||
continue
|
||||
|
||||
entity_list.append(
|
||||
Control4Climate(
|
||||
runtime_data,
|
||||
coordinator,
|
||||
item_name,
|
||||
item_id,
|
||||
item_device_name,
|
||||
item_manufacturer,
|
||||
item_model,
|
||||
item_parent_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entity_list)
|
||||
|
||||
|
||||
class Control4Climate(Control4Entity, ClimateEntity):
|
||||
"""Control4 climate entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL, HVACMode.HEAT_COOL]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
runtime_data: Control4RuntimeData,
|
||||
coordinator: DataUpdateCoordinator[dict[int, dict[str, Any]]],
|
||||
name: str,
|
||||
idx: int,
|
||||
device_name: str | None,
|
||||
device_manufacturer: str | None,
|
||||
device_model: str | None,
|
||||
device_id: int,
|
||||
) -> None:
|
||||
"""Initialize Control4 climate entity."""
|
||||
super().__init__(
|
||||
runtime_data,
|
||||
coordinator,
|
||||
name,
|
||||
idx,
|
||||
device_name,
|
||||
device_manufacturer,
|
||||
device_model,
|
||||
device_id,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self._thermostat_data is not None
|
||||
|
||||
def _create_api_object(self) -> C4Climate:
|
||||
"""Create a pyControl4 device object.
|
||||
|
||||
This exists so the director token used is always the latest one, without needing to re-init the entire entity.
|
||||
"""
|
||||
return C4Climate(self.runtime_data.director, self._idx)
|
||||
|
||||
@property
|
||||
def _thermostat_data(self) -> dict[str, Any] | None:
|
||||
"""Return the thermostat data from the coordinator."""
|
||||
return self.coordinator.data.get(self._idx)
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
return data.get(CONTROL4_CURRENT_TEMPERATURE)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> int | None:
|
||||
"""Return the current humidity."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
humidity = data.get(CONTROL4_HUMIDITY)
|
||||
return int(humidity) if humidity is not None else None
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return current HVAC mode."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return HVACMode.OFF
|
||||
c4_mode = data.get(CONTROL4_HVAC_MODE) or ""
|
||||
return C4_TO_HA_HVAC_MODE.get(c4_mode, HVACMode.OFF)
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return current HVAC action."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
c4_state = data.get(CONTROL4_HVAC_STATE)
|
||||
if c4_state is None:
|
||||
return None
|
||||
# Convert state to lowercase for mapping
|
||||
return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
hvac_mode = self.hvac_mode
|
||||
if hvac_mode == HVACMode.COOL:
|
||||
return data.get(CONTROL4_COOL_SETPOINT)
|
||||
if hvac_mode == HVACMode.HEAT:
|
||||
return data.get(CONTROL4_HEAT_SETPOINT)
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature_high(self) -> float | None:
|
||||
"""Return the high target temperature for auto mode."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
return data.get(CONTROL4_COOL_SETPOINT)
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature_low(self) -> float | None:
|
||||
"""Return the low target temperature for auto mode."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
return data.get(CONTROL4_HEAT_SETPOINT)
|
||||
return None
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target HVAC mode."""
|
||||
c4_hvac_mode = HA_TO_C4_HVAC_MODE[hvac_mode]
|
||||
c4_climate = self._create_api_object()
|
||||
await c4_climate.setHvacMode(c4_hvac_mode)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
c4_climate = self._create_api_object()
|
||||
low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
|
||||
high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
|
||||
# Handle temperature range for auto mode
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
if low_temp is not None:
|
||||
await c4_climate.setHeatSetpointF(low_temp)
|
||||
if high_temp is not None:
|
||||
await c4_climate.setCoolSetpointF(high_temp)
|
||||
# Handle single temperature setpoint
|
||||
elif temp is not None:
|
||||
if self.hvac_mode == HVACMode.COOL:
|
||||
await c4_climate.setCoolSetpointF(temp)
|
||||
elif self.hvac_mode == HVACMode.HEAT:
|
||||
await c4_climate.setHeatSetpointF(temp)
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -122,12 +122,10 @@ class WanIpSensor(SensorEntity):
try:
async with asyncio.timeout(10):
response = await self.resolver.query(self.hostname, self.querytype)
except TimeoutError as err:
_LOGGER.debug("Timeout while resolving host: %s", err)
except TimeoutError:
await self.resolver.close()
except DNSError as err:
_LOGGER.warning("Exception while resolving host: %s", err)
await self.resolver.close()

if response:
sorted_ips = sort_ips(

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/droplet",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["pydroplet==2.3.4"],
"requirements": ["pydroplet==2.3.3"],
"zeroconf": ["_droplet._tcp.local."]
}

@@ -1,42 +0,0 @@
|
||||
"""The Fing integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
|
||||
from .coordinator import FingConfigEntry, FingDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.DEVICE_TRACKER]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: FingConfigEntry) -> bool:
|
||||
"""Set up the Fing component."""
|
||||
|
||||
coordinator = FingDataUpdateCoordinator(hass, config_entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
if coordinator.data.network_id is None:
|
||||
_LOGGER.warning(
|
||||
"Skip setting up Fing integration; Received an empty NetworkId from the request - Check if the API version is the latest"
|
||||
)
|
||||
raise ConfigEntryError(
|
||||
"The Agent's API version is outdated. Please update the agent to the latest version."
|
||||
)
|
||||
|
||||
config_entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: FingConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
@@ -1,114 +0,0 @@
|
||||
"""Config flow file."""
|
||||
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fing_agent_api import FingAgent
|
||||
import httpx
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_IP_ADDRESS, CONF_PORT
|
||||
|
||||
from .const import DOMAIN, UPNP_AVAILABLE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FingConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Fing config flow."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Set up user step."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
devices_response = None
|
||||
agent_info_response = None
|
||||
|
||||
self._async_abort_entries_match(
|
||||
{CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS]}
|
||||
)
|
||||
|
||||
fing_api = FingAgent(
|
||||
ip=user_input[CONF_IP_ADDRESS],
|
||||
port=int(user_input[CONF_PORT]),
|
||||
key=user_input[CONF_API_KEY],
|
||||
)
|
||||
|
||||
try:
|
||||
devices_response = await fing_api.get_devices()
|
||||
|
||||
with suppress(httpx.ConnectError):
|
||||
# The suppression is needed because the get_agent_info method isn't available for desktop agents
|
||||
agent_info_response = await fing_api.get_agent_info()
|
||||
|
||||
except httpx.NetworkError as _:
|
||||
errors["base"] = "cannot_connect"
|
||||
except httpx.TimeoutException as _:
|
||||
errors["base"] = "timeout_connect"
|
||||
except httpx.HTTPStatusError as exception:
|
||||
description_placeholders["message"] = (
|
||||
f"{exception.response.status_code} - {exception.response.reason_phrase}"
|
||||
)
|
||||
if exception.response.status_code == 401:
|
||||
errors["base"] = "invalid_api_key"
|
||||
else:
|
||||
errors["base"] = "http_status_error"
|
||||
except httpx.InvalidURL as _:
|
||||
errors["base"] = "url_error"
|
||||
except (
|
||||
httpx.HTTPError,
|
||||
httpx.CookieConflict,
|
||||
httpx.StreamError,
|
||||
) as ex:
|
||||
_LOGGER.error("Unexpected exception: %s", ex)
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if (
|
||||
devices_response.network_id is not None
|
||||
and len(devices_response.network_id) > 0
|
||||
):
|
||||
agent_name = user_input.get(CONF_IP_ADDRESS)
|
||||
upnp_available = False
|
||||
if agent_info_response is not None:
|
||||
upnp_available = True
|
||||
agent_name = agent_info_response.agent_id
|
||||
await self.async_set_unique_id(agent_info_response.agent_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
data = {
|
||||
CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
UPNP_AVAILABLE: upnp_available,
|
||||
}
|
||||
|
||||
return self.async_create_entry(
|
||||
title=f"Fing Agent {agent_name}",
|
||||
data=data,
|
||||
)
|
||||
|
||||
return self.async_abort(reason="api_version_error")
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_IP_ADDRESS): str,
|
||||
vol.Required(CONF_PORT, default="49090"): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
),
|
||||
user_input,
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
@@ -1,4 +0,0 @@
|
||||
"""Const for the Fing integration."""
|
||||
|
||||
DOMAIN = "fing"
|
||||
UPNP_AVAILABLE = "upnp_available"
|
||||
@@ -1,85 +0,0 @@
|
||||
"""DataUpdateCoordinator for Fing integration."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from fing_agent_api import FingAgent
|
||||
from fing_agent_api.models import AgentInfoResponse, Device
|
||||
import httpx
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_IP_ADDRESS, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN, UPNP_AVAILABLE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type FingConfigEntry = ConfigEntry[FingDataUpdateCoordinator]
|
||||
|
||||
|
||||
@dataclass
|
||||
class FingDataObject:
|
||||
"""Fing Data Object."""
|
||||
|
||||
network_id: str | None = None
|
||||
agent_info: AgentInfoResponse | None = None
|
||||
devices: dict[str, Device] = field(default_factory=dict)
|
||||
|
||||
|
||||
class FingDataUpdateCoordinator(DataUpdateCoordinator[FingDataObject]):
|
||||
"""Class to manage fetching data from Fing Agent."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: FingConfigEntry) -> None:
|
||||
"""Initialize global Fing updater."""
|
||||
self._fing = FingAgent(
|
||||
ip=config_entry.data[CONF_IP_ADDRESS],
|
||||
port=int(config_entry.data[CONF_PORT]),
|
||||
key=config_entry.data[CONF_API_KEY],
|
||||
)
|
||||
self._upnp_available = config_entry.data[UPNP_AVAILABLE]
|
||||
update_interval = timedelta(seconds=30)
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=update_interval,
|
||||
config_entry=config_entry,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> FingDataObject:
|
||||
"""Fetch data from Fing Agent."""
|
||||
device_response = None
|
||||
agent_info_response = None
|
||||
try:
|
||||
device_response = await self._fing.get_devices()
|
||||
|
||||
if self._upnp_available:
|
||||
agent_info_response = await self._fing.get_agent_info()
|
||||
|
||||
except httpx.NetworkError as err:
|
||||
raise UpdateFailed("Failed to connect") from err
|
||||
except httpx.TimeoutException as err:
|
||||
raise UpdateFailed("Timeout establishing connection") from err
|
||||
except httpx.HTTPStatusError as err:
|
||||
if err.response.status_code == 401:
|
||||
raise UpdateFailed("Invalid API key") from err
|
||||
raise UpdateFailed(
|
||||
f"Http request failed -> {err.response.status_code} - {err.response.reason_phrase}"
|
||||
) from err
|
||||
except httpx.InvalidURL as err:
|
||||
raise UpdateFailed("Invalid hostname or IP address") from err
|
||||
except (
|
||||
httpx.HTTPError,
|
||||
httpx.CookieConflict,
|
||||
httpx.StreamError,
|
||||
) as err:
|
||||
raise UpdateFailed("Unexpected error from HTTP request") from err
|
||||
else:
|
||||
return FingDataObject(
|
||||
device_response.network_id,
|
||||
agent_info_response,
|
||||
{device.mac: device for device in device_response.devices},
|
||||
)
|
||||
@@ -1,127 +0,0 @@
|
||||
"""Platform for Device tracker integration."""
|
||||
|
||||
from fing_agent_api.models import Device
|
||||
|
||||
from homeassistant.components.device_tracker import ScannerEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import FingConfigEntry
|
||||
from .coordinator import FingDataUpdateCoordinator
|
||||
from .utils import get_icon_from_type
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: FingConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add sensors for passed config_entry in HA."""
|
||||
coordinator = config_entry.runtime_data
|
||||
entity_registry = er.async_get(hass)
|
||||
tracked_devices: set[str] = set()
|
    @callback
    def add_entities() -> None:
        latest_devices = set(coordinator.data.devices.keys())

        devices_to_remove = tracked_devices - set(latest_devices)
        devices_to_add = set(latest_devices) - tracked_devices

        entities_to_remove = []
        for entity_entry in entity_registry.entities.values():
            if entity_entry.config_entry_id != config_entry.entry_id:
                continue
            try:
                _, mac = entity_entry.unique_id.rsplit("-", 1)
                if mac in devices_to_remove:
                    entities_to_remove.append(entity_entry.entity_id)
            except ValueError:
                continue

        for entity_id in entities_to_remove:
            entity_registry.async_remove(entity_id)

        entities_to_add = []
        for mac_addr in devices_to_add:
            device = coordinator.data.devices[mac_addr]
            entities_to_add.append(FingTrackedDevice(coordinator, device))

        tracked_devices.clear()
        tracked_devices.update(latest_devices)
        async_add_entities(entities_to_add)

    add_entities()
    config_entry.async_on_unload(coordinator.async_add_listener(add_entities))


class FingTrackedDevice(CoordinatorEntity[FingDataUpdateCoordinator], ScannerEntity):
    """Represent a tracked device."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: FingDataUpdateCoordinator, device: Device) -> None:
        """Set up FingDevice entity."""
        super().__init__(coordinator)

        self._device = device
        agent_id = coordinator.data.network_id
        if coordinator.data.agent_info is not None:
            agent_id = coordinator.data.agent_info.agent_id

        self._attr_mac_address = self._device.mac
        self._attr_unique_id = f"{agent_id}-{self._attr_mac_address}"
        self._attr_name = self._device.name
        self._attr_icon = get_icon_from_type(self._device.type)

    @property
    def is_connected(self) -> bool:
        """Return true if the device is connected to the network."""
        return self._device.active

    @property
    def ip_address(self) -> str | None:
        """Return the primary ip address of the device."""
        return self._device.ip[0] if self._device.ip else None

    @property
    def entity_registry_enabled_default(self) -> bool:
        """Enable entity by default."""
        return True

    @property
    def unique_id(self) -> str | None:
        """Return the unique ID of the entity."""
        return self._attr_unique_id

    def check_for_updates(self, new_device: Device) -> bool:
        """Return true if the device has updates."""
        new_device_ip = new_device.ip[0] if new_device.ip else None
        current_device_ip = self._device.ip[0] if self._device.ip else None

        return (
            current_device_ip != new_device_ip
            or self._device.active != new_device.active
            or self._device.type != new_device.type
            or self._attr_name != new_device.name
            or self._attr_icon != get_icon_from_type(new_device.type)
        )

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        updated_device_data = self.coordinator.data.devices.get(self._device.mac)
        if updated_device_data is not None and self.check_for_updates(
            updated_device_data
        ):
            self._device = updated_device_data
            self._attr_name = updated_device_data.name
            self._attr_icon = get_icon_from_type(updated_device_data.type)
            er.async_get(self.hass).async_update_entity(
                entity_id=self.entity_id,
                original_name=self._attr_name,
                original_icon=self._attr_icon,
            )
            self.async_write_ha_state()
@@ -1,10 +0,0 @@
{
  "domain": "fing",
  "name": "Fing",
  "codeowners": ["@Lorenzo-Gasparini"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/fing",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["fing_agent_api==1.0.3"]
}
@@ -1,72 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: The integration has no actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow: done
  config-flow-test-coverage: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: There are no actions in the Fing integration.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Fing integration entities do not use events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: The integration has no actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: The integration has no options flow.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery: todo
  discovery-update-info: todo
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: todo
  dynamic-devices: done
  entity-category: todo
  entity-device-class:
    status: exempt
    comment: The integration creates only device tracker entities.
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: done

  # Platinum
  async-dependency: todo
  inject-websession: todo
  strict-typing: todo
@@ -1,31 +0,0 @@
{
  "config": {
    "step": {
      "user": {
        "title": "Set up Fing agent",
        "data": {
          "ip_address": "[%key:common::config_flow::data::ip%]",
          "port": "[%key:common::config_flow::data::port%]",
          "api_key": "[%key:common::config_flow::data::api_key%]"
        },
        "data_description": {
          "ip_address": "IP address of the Fing agent.",
          "port": "Port number of the Fing API.",
          "api_key": "API key used to authenticate with the Fing API."
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]",
      "url_error": "[%key:common::config_flow::error::invalid_host%]",
      "http_status_error": "HTTP request failed: {message}"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "api_version_error": "Your agent is using an outdated API version. The required 'network_id' parameter is missing. Please update to the latest API version."
    }
  }
}
@@ -1,85 +0,0 @@
"""Utils functions."""

from enum import Enum


class DeviceType(Enum):
    """Device types enum."""

    GENERIC = "mdi:lan-connect"
    MOBILE = PHONE = "mdi:cellphone"
    TABLET = IPOD = EREADER = "mdi:tablet"
    WATCH = WEARABLE = "mdi:watch"
    CAR = AUTOMOTIVE = "mdi:car-back"
    MEDIA_PLAYER = "mdi:volume-high"
    TELEVISION = "mdi:television"
    GAME_CONSOLE = "mdi:nintendo-game-boy"
    STREAMING_DONGLE = "mdi:cast"
    LOUDSPEAKER = SOUND_SYSTEM = STB = SATELLITE = MUSIC = "mdi:speaker"
    DISC_PLAYER = "mdi:disk-player"
    REMOTE_CONTROL = "mdi:remote-tv"
    RADIO = "mdi:radio"
    PHOTO_CAMERA = PHOTOS = "mdi:camera"
    MICROPHONE = VOICE_CONTROL = "mdi:microphone"
    PROJECTOR = "mdi:projector"
    COMPUTER = DESKTOP = "mdi:desktop-tower"
    LAPTOP = "mdi:laptop"
    PRINTER = "mdi:printer"
    SCANNER = "mdi:scanner"
    POS = "mdi:printer-pos"
    CLOCK = "mdi:clock"
    BARCODE = "mdi:barcode"
    SURVEILLANCE_CAMERA = BABY_MONITOR = PET_MONITOR = "mdi:cctv"
    POE_PLUG = HEALTH_MONITOR = SMART_HOME = SMART_METER = APPLIANCE = SLEEP = (
        "mdi:home-automation"
    )
    SMART_PLUG = "mdi:power-plug"
    LIGHT = "mdi:lightbulb"
    THERMOSTAT = HEATING = "mdi:home-thermometer"
    POWER_SYSTEM = ENERGY = "mdi:lightning-bolt"
    SOLAR_PANEL = "mdi:solar-power"
    WASHER = "mdi:washing-machine"
    FRIDGE = "mdi:fridge"
    CLEANER = "mdi:vacuum"
    GARAGE = "mdi:garage"
    SPRINKLER = "mdi:sprinkler"
    BELL = "mdi:doorbell"
    KEY_LOCK = "mdi:lock-smart"
    CONTROL_PANEL = SMART_CONTROLLER = "mdi:alarm-panel"
    SCALE = "mdi:scale-bathroom"
    TOY = "mdi:teddy-bear"
    ROBOT = "mdi:robot"
    WEATHER = "mdi:weather-cloudy"
    ALARM = "mdi:alarm-light"
    MOTION_DETECTOR = "mdi:motion-sensor"
    SMOKE = HUMIDITY = SENSOR = DOMOTZ_BOX = FINGBOX = "mdi:smoke-detector"
    ROUTER = MODEM = GATEWAY = FIREWALL = VPN = SMALL_CELL = "mdi:router-network"
    WIFI = WIFI_EXTENDER = "mdi:wifi"
    NAS_STORAGE = "mdi:nas"
    SWITCH = "mdi:switch"
    USB = "mdi:usb"
    CLOUD = "mdi:cloud"
    BATTERY = "mdi:battery"
    NETWORK_APPLIANCE = "mdi:network"
    VIRTUAL_MACHINE = MAIL_SERVER = FILE_SERVER = PROXY_SERVER = WEB_SERVER = (
        DOMAIN_SERVER
    ) = COMMUNICATION = "mdi:monitor"
    SERVER = "mdi:server"
    TERMINAL = "mdi:console"
    DATABASE = "mdi:database"
    RASPBERRY = ARDUINO = "mdi:raspberry-pi"
    PROCESSOR = CIRCUIT_CARD = RFID = "mdi:chip"
    INDUSTRIAL = "mdi:factory"
    MEDICAL = "mdi:medical-bag"
    VOIP = CONFERENCING = "mdi:phone-voip"
    FITNESS = "mdi:dumbbell"
    POOL = "mdi:pool"
    SECURITY_SYSTEM = "mdi:security"


def get_icon_from_type(type: str) -> str:
    """Return the right icon based on the type."""
    try:
        return DeviceType[type].value
    except (ValueError, KeyError):
        return "mdi:lan-connect"
@@ -15,13 +15,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_RECOMMENDED,
|
||||
LOGGER,
|
||||
RECOMMENDED_A_TASK_MAX_TOKENS,
|
||||
RECOMMENDED_IMAGE_MODEL,
|
||||
)
|
||||
from .const import CONF_CHAT_MODEL, CONF_RECOMMENDED, LOGGER, RECOMMENDED_IMAGE_MODEL
|
||||
from .entity import (
|
||||
ERROR_GETTING_RESPONSE,
|
||||
GoogleGenerativeAILLMBaseEntity,
|
||||
@@ -79,9 +73,7 @@ class GoogleGenerativeAITaskEntity(
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> ai_task.GenDataTaskResult:
|
||||
"""Handle a generate data task."""
|
||||
await self._async_handle_chat_log(
|
||||
chat_log, task.structure, default_max_tokens=RECOMMENDED_A_TASK_MAX_TOKENS
|
||||
)
|
||||
await self._async_handle_chat_log(chat_log, task.structure)
|
||||
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
LOGGER.error(
|
||||
|
||||
@@ -32,8 +32,6 @@ CONF_TOP_K = "top_k"
|
||||
RECOMMENDED_TOP_K = 64
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
# Input 5000 tokens, output 19400 tokens = 0.05 USD
|
||||
RECOMMENDED_A_TASK_MAX_TOKENS = 19400
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD = "harassment_block_threshold"
|
||||
CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold"
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold"
|
||||
|
||||
@@ -472,7 +472,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
structure: vol.Schema | None = None,
|
||||
default_max_tokens: int | None = None,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
@@ -619,9 +618,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
def create_generate_content_config(
|
||||
self, default_max_tokens: int | None = None
|
||||
) -> GenerateContentConfig:
|
||||
def create_generate_content_config(self) -> GenerateContentConfig:
|
||||
"""Create the GenerateContentConfig for the LLM."""
|
||||
options = self.subentry.data
|
||||
model = options.get(CONF_CHAT_MODEL, self.default_model)
|
||||
@@ -635,12 +632,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
|
||||
top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=options.get(
|
||||
CONF_MAX_TOKENS,
|
||||
default_max_tokens
|
||||
if default_max_tokens is not None
|
||||
else RECOMMENDED_MAX_TOKENS,
|
||||
),
|
||||
max_output_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
|
||||
@@ -109,8 +109,6 @@ DATA_KEY_HOST = "host"
|
||||
DATA_KEY_SUPERVISOR_ISSUES = "supervisor_issues"
|
||||
|
||||
PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
|
||||
PLACEHOLDER_KEY_ADDON_DOCUMENTATION = "addon_documentation"
|
||||
PLACEHOLDER_KEY_ADDON_URL = "addon_url"
|
||||
PLACEHOLDER_KEY_REFERENCE = "reference"
|
||||
PLACEHOLDER_KEY_COMPONENTS = "components"
|
||||
@@ -122,7 +120,6 @@ ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING = "issue_addon_detached_addon_missing"
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED = "issue_addon_detached_addon_removed"
|
||||
ISSUE_KEY_ADDON_PWNED = "issue_addon_pwned"
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE = "issue_system_free_space"
|
||||
ISSUE_KEY_ADDON_DEPRECATED = "issue_addon_deprecated_addon"
|
||||
|
||||
CORE_CONTAINER = "homeassistant"
|
||||
SUPERVISOR_CONTAINER = "hassio_supervisor"
|
||||
@@ -159,7 +156,6 @@ EXTRA_PLACEHOLDERS = {
|
||||
ISSUE_KEY_ADDON_PWNED: {
|
||||
"more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords",
|
||||
},
|
||||
ISSUE_KEY_ADDON_DEPRECATED: HELP_URLS,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -43,7 +43,6 @@ from .const import (
|
||||
EVENT_SUPPORTED_CHANGED,
|
||||
EXTRA_PLACEHOLDERS,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
@@ -85,7 +84,6 @@ ISSUE_KEYS_FOR_REPAIRS = {
|
||||
"issue_system_disk_lifetime",
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -18,13 +18,10 @@ from . import get_addons_info, get_issues_info
|
||||
from .const import (
|
||||
EXTRA_PLACEHOLDERS,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
|
||||
PLACEHOLDER_KEY_ADDON,
|
||||
PLACEHOLDER_KEY_ADDON_DOCUMENTATION,
|
||||
PLACEHOLDER_KEY_ADDON_INFO,
|
||||
PLACEHOLDER_KEY_COMPONENTS,
|
||||
PLACEHOLDER_KEY_REFERENCE,
|
||||
)
|
||||
@@ -198,23 +195,6 @@ class AddonIssueRepairFlow(SupervisorIssueRepairFlow):
|
||||
return placeholders or None
|
||||
|
||||
|
||||
class DeprecatedAddonIssueRepairFlow(AddonIssueRepairFlow):
|
||||
"""Handler for deprecated addon issue fixing flows."""
|
||||
|
||||
@property
|
||||
def description_placeholders(self) -> dict[str, str] | None:
|
||||
"""Get description placeholders for steps."""
|
||||
placeholders: dict[str, str] = super().description_placeholders or {}
|
||||
if self.issue and self.issue.reference:
|
||||
placeholders[PLACEHOLDER_KEY_ADDON_INFO] = (
|
||||
f"homeassistant://hassio/addon/{self.issue.reference}/info"
|
||||
)
|
||||
placeholders[PLACEHOLDER_KEY_ADDON_DOCUMENTATION] = (
|
||||
f"homeassistant://hassio/addon/{self.issue.reference}/documentation"
|
||||
)
|
||||
return placeholders or None
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
@@ -225,8 +205,6 @@ async def async_create_fix_flow(
|
||||
issue = supervisor_issues and supervisor_issues.get_issue(issue_id)
|
||||
if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG:
|
||||
return DockerConfigIssueRepairFlow(hass, issue_id)
|
||||
if issue and issue.key == ISSUE_KEY_ADDON_DEPRECATED:
|
||||
return DeprecatedAddonIssueRepairFlow(hass, issue_id)
|
||||
if issue and issue.key in {
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
|
||||
@@ -56,19 +56,6 @@
|
||||
"title": "Insecure secrets detected in add-on configuration",
|
||||
"description": "Add-on {addon} uses secrets/passwords in its configuration which are detected as not secure. See [pwned passwords and secrets]({more_info_pwned}) for more information on this issue."
|
||||
},
|
||||
"issue_addon_deprecated_addon": {
|
||||
"title": "Installed add-on is deprecated",
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"addon_execute_remove": {
|
||||
"description": "Add-on {addon} is marked deprecated by the developer. This means it is no longer being maintained and so may break or become a security issue over time.\n\nReview the [readme]({addon_info}) and [documentation]({addon_documentation}) of the add-on to see if the developer provided instructions.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"apply_suggestion_fail": "Could not uninstall the add-on. Check the Supervisor logs for more details."
|
||||
}
|
||||
}
|
||||
},
|
||||
"issue_mount_mount_failed": {
|
||||
"title": "Network storage device failed",
|
||||
"fix_flow": {
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.83", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.82", "babel==2.15.0"]
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import CONFIG_DEFAULT_MAX_TEMP, CONFIG_DEFAULT_MIN_TEMP
|
||||
from .coordinator import HuumConfigEntry, HuumDataUpdateCoordinator
|
||||
from .entity import HuumBaseEntity
|
||||
|
||||
@@ -56,12 +55,12 @@ class HuumDevice(HuumBaseEntity, ClimateEntity):
|
||||
@property
|
||||
def min_temp(self) -> int:
|
||||
"""Return configured minimal temperature."""
|
||||
return self.coordinator.data.sauna_config.min_temp or CONFIG_DEFAULT_MIN_TEMP
|
||||
return self.coordinator.data.sauna_config.min_temp
|
||||
|
||||
@property
|
||||
def max_temp(self) -> int:
|
||||
"""Return configured maximum temperature."""
|
||||
return self.coordinator.data.sauna_config.max_temp or CONFIG_DEFAULT_MAX_TEMP
|
||||
return self.coordinator.data.sauna_config.max_temp
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
|
||||
@@ -9,6 +9,3 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.LIGHT, Platform.
|
||||
CONFIG_STEAMER = 1
|
||||
CONFIG_LIGHT = 2
|
||||
CONFIG_STEAMER_AND_LIGHT = 3
|
||||
|
||||
CONFIG_DEFAULT_MIN_TEMP = 40
|
||||
CONFIG_DEFAULT_MAX_TEMP = 110
|
||||
|
||||
@@ -5,17 +5,11 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS,
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
brightness_supported,
|
||||
)
|
||||
from homeassistant.components.light import ColorMode, LightEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.color import brightness_to_value, value_to_brightness
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LunatoneConfigEntry, LunatoneDevicesDataUpdateCoordinator
|
||||
@@ -48,10 +42,8 @@ class LunatoneLight(
|
||||
):
|
||||
"""Representation of a Lunatone light."""
|
||||
|
||||
BRIGHTNESS_SCALE = (1, 100)
|
||||
|
||||
_last_brightness = 255
|
||||
|
||||
_attr_color_mode = ColorMode.ONOFF
|
||||
_attr_supported_color_modes = {ColorMode.ONOFF}
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_should_poll = False
|
||||
@@ -90,25 +82,6 @@ class LunatoneLight(
|
||||
"""Return True if light is on."""
|
||||
return self._device is not None and self._device.is_on
|
||||
|
||||
@property
|
||||
def brightness(self) -> int:
|
||||
"""Return the brightness of this light between 0..255."""
|
||||
if self._device is None:
|
||||
return 0
|
||||
return value_to_brightness(self.BRIGHTNESS_SCALE, self._device.brightness)
|
||||
|
||||
@property
|
||||
def color_mode(self) -> ColorMode:
|
||||
"""Return the color mode of the light."""
|
||||
if self._device is not None and self._device.is_dimmable:
|
||||
return ColorMode.BRIGHTNESS
|
||||
return ColorMode.ONOFF
|
||||
|
||||
@property
|
||||
def supported_color_modes(self) -> set[ColorMode]:
|
||||
"""Return the supported color modes."""
|
||||
return {self.color_mode}
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
@@ -118,27 +91,13 @@ class LunatoneLight(
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Instruct the light to turn on."""
|
||||
assert self._device
|
||||
|
||||
if brightness_supported(self.supported_color_modes):
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness)
|
||||
await self._device.fade_to_brightness(
|
||||
brightness_to_value(self.BRIGHTNESS_SCALE, brightness)
|
||||
)
|
||||
else:
|
||||
await self._device.switch_on()
|
||||
|
||||
await self._device.switch_on()
|
||||
await asyncio.sleep(STATUS_UPDATE_DELAY)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Instruct the light to turn off."""
|
||||
assert self._device
|
||||
|
||||
if brightness_supported(self.supported_color_modes):
|
||||
self._last_brightness = self.brightness
|
||||
await self._device.fade_to_brightness(0)
|
||||
else:
|
||||
await self._device.switch_off()
|
||||
|
||||
await self._device.switch_off()
|
||||
await asyncio.sleep(STATUS_UPDATE_DELAY)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
@@ -123,9 +123,6 @@
|
||||
"evse_fault_state": {
|
||||
"default": "mdi:ev-station"
|
||||
},
|
||||
"operational_error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"pump_control_mode": {
|
||||
"default": "mdi:pipe-wrench"
|
||||
},
|
||||
|
||||
@@ -86,14 +86,6 @@ OPERATIONAL_STATE_MAP = {
|
||||
clusters.OperationalState.Enums.OperationalStateEnum.kError: "error",
|
||||
}
|
||||
|
||||
OPERATIONAL_STATE_ERROR_MAP = {
|
||||
# enum with known Error state values which we can translate
|
||||
clusters.OperationalState.Enums.ErrorStateEnum.kNoError: "no_error",
|
||||
clusters.OperationalState.Enums.ErrorStateEnum.kUnableToStartOrResume: "unable_to_start_or_resume",
|
||||
clusters.OperationalState.Enums.ErrorStateEnum.kUnableToCompleteOperation: "unable_to_complete_operation",
|
||||
clusters.OperationalState.Enums.ErrorStateEnum.kCommandInvalidInState: "command_invalid_in_state",
|
||||
}
|
||||
|
||||
RVC_OPERATIONAL_STATE_MAP = {
|
||||
# enum with known Operation state values which we can translate
|
||||
**OPERATIONAL_STATE_MAP,
|
||||
@@ -102,29 +94,6 @@ RVC_OPERATIONAL_STATE_MAP = {
|
||||
clusters.RvcOperationalState.Enums.OperationalStateEnum.kDocked: "docked",
|
||||
}
|
||||
|
||||
RVC_OPERATIONAL_STATE_ERROR_MAP = {
|
||||
# enum with known Error state values which we can translate
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kNoError: "no_error",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kUnableToStartOrResume: "unable_to_start_or_resume",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kUnableToCompleteOperation: "unable_to_complete_operation",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kCommandInvalidInState: "command_invalid_in_state",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kFailedToFindChargingDock: "failed_to_find_charging_dock",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kStuck: "stuck",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kDustBinMissing: "dust_bin_missing",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kDustBinFull: "dust_bin_full",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kWaterTankEmpty: "water_tank_empty",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kWaterTankMissing: "water_tank_missing",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kWaterTankLidOpen: "water_tank_lid_open",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kMopCleaningPadMissing: "mop_cleaning_pad_missing",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kLowBattery: "low_battery",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kCannotReachTargetArea: "cannot_reach_target_area",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kDirtyWaterTankFull: "dirty_water_tank_full",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kDirtyWaterTankMissing: "dirty_water_tank_missing",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kWheelsJammed: "wheels_jammed",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kBrushJammed: "brush_jammed",
|
||||
clusters.RvcOperationalState.Enums.ErrorStateEnum.kNavigationSensorObscured: "navigation_sensor_obscured",
|
||||
}
|
||||
|
||||
BOOST_STATE_MAP = {
|
||||
clusters.WaterHeaterManagement.Enums.BoostStateEnum.kInactive: "inactive",
|
||||
clusters.WaterHeaterManagement.Enums.BoostStateEnum.kActive: "active",
|
||||
@@ -1132,19 +1101,6 @@ DISCOVERY_SCHEMAS = [
|
||||
# don't discover this entry if the supported state list is empty
|
||||
secondary_value_is_not=[],
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterSensorEntityDescription(
|
||||
key="OperationalStateOperationalError",
|
||||
translation_key="operational_error",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=list(OPERATIONAL_STATE_ERROR_MAP.values()),
|
||||
device_to_ha=lambda x: OPERATIONAL_STATE_ERROR_MAP.get(x.errorStateID),
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.OperationalState.Attributes.OperationalError,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterListSensorEntityDescription(
|
||||
@@ -1189,19 +1145,6 @@ DISCOVERY_SCHEMAS = [
|
||||
device_type=(device_types.Thermostat,),
|
||||
allow_multi=True, # also used for climate entity
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterSensorEntityDescription(
|
||||
key="ThermostatPIHeatingDemand",
|
||||
translation_key="pi_heating_demand",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.Thermostat.Attributes.PIHeatingDemand,),
|
||||
device_type=(device_types.Thermostat,),
|
||||
featuremap_contains=clusters.Thermostat.Bitmaps.Feature.kHeating,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterSensorEntityDescription(
|
||||
@@ -1238,19 +1181,6 @@ DISCOVERY_SCHEMAS = [
|
||||
# don't discover this entry if the supported state list is empty
|
||||
secondary_value_is_not=[],
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterSensorEntityDescription(
|
||||
key="RvcOperationalStateOperationalError",
|
||||
translation_key="operational_error",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=list(RVC_OPERATIONAL_STATE_ERROR_MAP.values()),
|
||||
device_to_ha=lambda x: RVC_OPERATIONAL_STATE_ERROR_MAP.get(x.errorStateID),
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.RvcOperationalState.Attributes.OperationalError,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=MatterOperationalStateSensorEntityDescription(
|
||||
|
||||
@@ -441,33 +441,6 @@
|
||||
"evse_soc": {
|
||||
"name": "State of charge"
|
||||
},
|
||||
"operational_error": {
|
||||
"name": "Operational error",
|
||||
"state": {
|
||||
"no_error": "No error",
|
||||
"unable_to_start_or_resume": "Unable to start or resume",
|
||||
"unable_to_complete_operation": "Unable to complete operation",
|
||||
"command_invalid_in_state": "Command invalid in current state",
|
||||
"failed_to_find_charging_dock": "Failed to find charging dock",
|
||||
"stuck": "Stuck",
|
||||
"dust_bin_missing": "Dust bin missing",
|
||||
"dust_bin_full": "Dust bin full",
|
||||
"water_tank_empty": "Water tank empty",
|
||||
"water_tank_missing": "Water tank missing",
|
||||
"water_tank_lid_open": "Water tank lid open",
|
||||
"mop_cleaning_pad_missing": "Mop cleaning pad missing",
|
||||
"low_battery": "Low battery",
|
||||
"cannot_reach_target_area": "Cannot reach target area",
|
||||
"dirty_water_tank_full": "Dirty water tank full",
|
||||
"dirty_water_tank_missing": "Dirty water tank missing",
|
||||
"wheels_jammed": "Wheels jammed",
|
||||
"brush_jammed": "Brush jammed",
|
||||
"navigation_sensor_obscured": "Navigation sensor obscured"
|
||||
}
|
||||
},
|
||||
"pi_heating_demand": {
|
||||
"name": "Heating demand"
|
||||
},
|
||||
"nitrogen_dioxide": {
|
||||
"name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
|
||||
},
|
||||
|
||||
@@ -270,13 +270,10 @@ class MetOfficeWeather(
|
||||
self.forecast_coordinators["daily"],
|
||||
)
|
||||
timesteps = coordinator.data.timesteps
|
||||
start_datetime = datetime.now(tz=timesteps[0]["time"].tzinfo).replace(
|
||||
hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
return [
|
||||
_build_daily_forecast_data(timestep)
|
||||
for timestep in timesteps
|
||||
if timestep["time"] >= start_datetime
|
||||
if timestep["time"] > datetime.now(tz=timesteps[0]["time"].tzinfo)
|
||||
]
|
||||
|
||||
@callback
|
||||
@@ -288,13 +285,10 @@ class MetOfficeWeather(
|
||||
)
|
||||
|
||||
timesteps = coordinator.data.timesteps
|
||||
start_datetime = datetime.now(tz=timesteps[0]["time"].tzinfo).replace(
|
||||
minute=0, second=0, microsecond=0
|
||||
)
|
||||
return [
|
||||
_build_hourly_forecast_data(timestep)
|
||||
for timestep in timesteps
|
||||
if timestep["time"] >= start_datetime
|
||||
if timestep["time"] > datetime.now(tz=timesteps[0]["time"].tzinfo)
|
||||
]
|
||||
|
||||
@callback
|
||||
@@ -305,11 +299,8 @@ class MetOfficeWeather(
|
||||
self.forecast_coordinators["twice_daily"],
|
||||
)
|
||||
timesteps = coordinator.data.timesteps
|
||||
start_datetime = datetime.now(tz=timesteps[0]["time"].tzinfo).replace(
|
||||
hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
return [
|
||||
_build_twice_daily_forecast_data(timestep)
|
||||
for timestep in timesteps
|
||||
if timestep["time"] >= start_datetime
|
||||
if timestep["time"] > datetime.now(tz=timesteps[0]["time"].tzinfo)
|
||||
]
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .const import _LOGGER
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE]
|
||||
PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT]
|
||||
|
||||
type NikoHomeControlConfigEntry = ConfigEntry[NHCController]
|
||||
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
"""Scene Platform for Niko Home Control."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.scene import BaseScene
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import NikoHomeControlConfigEntry
|
||||
from .entity import NikoHomeControlEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: NikoHomeControlConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Niko Home Control scene entry."""
|
||||
controller = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
NikoHomeControlScene(scene, controller, entry.entry_id)
|
||||
for scene in controller.scenes
|
||||
)
|
||||
|
||||
|
||||
class NikoHomeControlScene(NikoHomeControlEntity, BaseScene):
|
||||
"""Representation of a Niko Home Control Scene."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
async def _async_activate(self, **kwargs: Any) -> None:
|
||||
"""Activate scene. Try to get entities into requested state."""
|
||||
await self._action.activate()
|
||||
|
||||
def update_state(self) -> None:
|
||||
"""Update HA state."""
|
||||
self._async_record_activation()
|
||||
@@ -110,8 +110,6 @@ async def _create_webhook(
|
||||
translation_placeholders={
|
||||
"base_url": hass_url,
|
||||
"network_link": "https://my.home-assistant.io/redirect/network/",
|
||||
"sample_ip": "192.168.1.10",
|
||||
"sample_url": "http://192.168.1.10:8123",
|
||||
},
|
||||
)
|
||||
else:
|
||||
|
||||
@@ -177,5 +177,4 @@ class NukiConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(data_schema, user_input),
|
||||
errors=errors,
|
||||
description_placeholders={"sample_ip": "192.168.1.25"},
|
||||
)
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"encrypt_token": "Use an encrypted token for authentication."
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Nuki bridge. For example: {sample_ip}."
|
||||
"host": "The hostname or IP address of your Nuki bridge. For example: 192.168.1.25."
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
@@ -34,7 +34,7 @@
|
||||
"issues": {
|
||||
"https_webhook": {
|
||||
"title": "Nuki webhook URL uses HTTPS (SSL)",
|
||||
"description": "The Nuki bridge cannot push events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `{sample_url}` where `{sample_ip}` is the IP of the Home Assistant device"
|
||||
"description": "The Nuki bridge can not push events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `http://192.168.1.10:8123` where `192.168.1.10` is the IP of the Home Assistant device"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -56,13 +56,7 @@ def ensure_valid_path(value):
|
||||
return value
|
||||
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CAMERA,
|
||||
Platform.NUMBER,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CAMERA, Platform.SENSOR]
|
||||
DEFAULT_NAME = "OctoPrint"
|
||||
CONF_NUMBER_OF_TOOLS = "number_of_tools"
|
||||
CONF_BED = "bed"
|
||||
|
||||
@@ -1,146 +0,0 @@
|
||||
"""Support for OctoPrint number entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from pyoctoprintapi import OctoprintClient
|
||||
|
||||
from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import OctoprintDataUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_bed(tool_name: str) -> bool:
|
||||
"""Return True if the tool name indicates a bed."""
|
||||
return tool_name == "bed"
|
||||
|
||||
|
||||
def is_extruder(tool_name: str) -> bool:
|
||||
"""Return True if the tool name indicates an extruder."""
|
||||
return tool_name.startswith("tool") and tool_name[4:].isdigit()
|
||||
|
||||
|
||||
def is_first_extruder(tool_name: str) -> bool:
|
||||
"""Return True if the tool name indicates the first extruder."""
|
||||
return tool_name == "tool0"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the OctoPrint number entities."""
|
||||
coordinator: OctoprintDataUpdateCoordinator = hass.data[DOMAIN][
|
||||
config_entry.entry_id
|
||||
]["coordinator"]
|
||||
client: OctoprintClient = hass.data[DOMAIN][config_entry.entry_id]["client"]
|
||||
device_id = config_entry.unique_id
|
||||
|
||||
assert device_id is not None
|
||||
|
||||
known_tools = set()
|
||||
|
||||
@callback
|
||||
def async_add_tool_numbers() -> None:
|
||||
if not coordinator.data["printer"]:
|
||||
return
|
||||
|
||||
new_numbers: list[OctoPrintTemperatureNumber] = []
|
||||
for tool in coordinator.data["printer"].temperatures:
|
||||
if (
|
||||
is_extruder(tool.name) or is_bed(tool.name)
|
||||
) and tool.name not in known_tools:
|
||||
assert device_id is not None
|
||||
known_tools.add(tool.name)
|
||||
new_numbers.append(
|
||||
OctoPrintTemperatureNumber(
|
||||
coordinator,
|
||||
client,
|
||||
tool.name,
|
||||
device_id,
|
||||
)
|
||||
)
|
||||
async_add_entities(new_numbers)
|
||||
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(async_add_tool_numbers))
|
||||
|
||||
if coordinator.data["printer"]:
|
||||
async_add_tool_numbers()
|
||||
|
||||
|
||||
class OctoPrintTemperatureNumber(
|
||||
CoordinatorEntity[OctoprintDataUpdateCoordinator], NumberEntity
|
||||
):
|
||||
"""Representation of an OctoPrint temperature setter entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
|
||||
_attr_native_min_value = 0
|
||||
_attr_native_max_value = 300
|
||||
_attr_native_step = 1
|
||||
_attr_mode = NumberMode.BOX
|
||||
_attr_device_class = NumberDeviceClass.TEMPERATURE
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: OctoprintDataUpdateCoordinator,
|
||||
client: OctoprintClient,
|
||||
tool: str,
|
||||
device_id: str,
|
||||
) -> None:
|
||||
"""Initialize a new OctoPrint temperature number entity."""
|
||||
super().__init__(coordinator)
|
||||
self._api_tool = tool
|
||||
self._attr_device_info = coordinator.device_info
|
||||
self._attr_unique_id = f"{device_id}_{tool}_temperature"
|
||||
self._client = client
|
||||
self._device_id = device_id
|
||||
if is_bed(tool):
|
||||
self._attr_translation_key = "bed_temperature"
|
||||
elif is_first_extruder(tool):
|
||||
self._attr_translation_key = "extruder_temperature"
|
||||
else:
|
||||
self._attr_translation_key = "extruder_n_temperature"
|
||||
self._attr_translation_placeholders = {"n": tool[4:]}
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the current target temperature."""
|
||||
if not self.coordinator.data["printer"]:
|
||||
return None
|
||||
for tool in self.coordinator.data["printer"].temperatures:
|
||||
if tool.name == self._api_tool and tool.target_temp is not None:
|
||||
return tool.target_temp
|
||||
|
||||
return None
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the target temperature."""
|
||||
|
||||
try:
|
||||
if is_bed(self._api_tool):
|
||||
await self._client.set_bed_temperature(int(value))
|
||||
elif is_extruder(self._api_tool):
|
||||
await self._client.set_tool_temperature(self._api_tool, int(value))
|
||||
except Exception as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="error_setting_temperature",
|
||||
translation_placeholders={
|
||||
"tool": self._api_tool,
|
||||
},
|
||||
) from err
|
||||
|
||||
# Request coordinator update to reflect the change
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -36,23 +36,7 @@
|
||||
"get_api_key": "Open the OctoPrint UI and select **Allow** on the Access Request for **Home Assistant**."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"number": {
|
||||
"bed_temperature": {
|
||||
"name": "Bed temperature"
|
||||
},
|
||||
"extruder_temperature": {
|
||||
"name": "Extruder temperature"
|
||||
},
|
||||
"extruder_n_temperature": {
|
||||
"name": "Extruder {n} temperature"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"error_setting_temperature": {
|
||||
"message": "Error setting target {tool} temperature"
|
||||
},
|
||||
"missing_client": {
|
||||
"message": "No client for device ID: {device_id}"
|
||||
}
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["plugwise"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["plugwise==1.8.2"],
|
||||
"requirements": ["plugwise==1.8.1"],
|
||||
"zeroconf": ["_plugwise._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -142,5 +142,5 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.state_fn(
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_name]
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_id]
|
||||
)
|
||||
|
||||
@@ -95,7 +95,12 @@ class PortainerButton(PortainerContainerEntity, ButtonEntity):
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
device_identifier = (
|
||||
self._device_info.names[0].replace("/", " ").strip()
|
||||
if self._device_info.names
|
||||
else None
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Trigger the Portainer button press service."""
|
||||
|
||||
@@ -144,10 +144,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
id=endpoint.id,
|
||||
name=endpoint.name,
|
||||
endpoint=endpoint,
|
||||
containers={
|
||||
container.names[0].replace("/", " ").strip(): container
|
||||
for container in containers
|
||||
},
|
||||
containers={container.id: container for container in containers},
|
||||
docker_version=docker_version,
|
||||
docker_info=docker_info,
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/portainer",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyportainer==1.0.7"]
|
||||
"requirements": ["pyportainer==1.0.4"]
|
||||
}
|
||||
|
||||
@@ -212,7 +212,7 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_name]
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_id]
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -123,7 +123,7 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the device."""
|
||||
return self.entity_description.is_on_fn(
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_name]
|
||||
self.coordinator.data[self.endpoint_id].containers[self.device_id]
|
||||
)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
|
||||
@@ -8,15 +8,11 @@ import logging
|
||||
from pyprobeplus import ProbePlusDevice
|
||||
from pyprobeplus.exceptions import ProbePlusDeviceNotFound, ProbePlusError
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
type ProbePlusConfigEntry = ConfigEntry[ProbePlusDataUpdateCoordinator]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -43,17 +39,8 @@ class ProbePlusDataUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
available_scanners = bluetooth.async_scanner_count(hass, connectable=True)
|
||||
|
||||
if available_scanners == 0:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_bleak_scanner",
|
||||
)
|
||||
|
||||
self.device: ProbePlusDevice = ProbePlusDevice(
|
||||
address_or_ble_device=entry.data[CONF_ADDRESS],
|
||||
scanner=bluetooth.async_get_scanner(hass),
|
||||
name=entry.title,
|
||||
notify_callback=self.async_update_listeners,
|
||||
)
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyprobeplus==1.1.2"]
|
||||
"requirements": ["pyprobeplus==1.1.1"]
|
||||
}
|
||||
|
||||
@@ -45,10 +45,5 @@
|
||||
"name": "Relay voltage"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"no_bleak_scanner": {
|
||||
"message": "No compatible Bluetooth scanner found."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/subaru",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["stdiomask", "subarulink"],
|
||||
"requirements": ["subarulink==0.7.15"]
|
||||
"requirements": ["subarulink==0.7.13"]
|
||||
}
|
||||
|
||||
@@ -41,5 +41,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["switchbot"],
|
||||
"quality_scale": "gold",
|
||||
"requirements": ["PySwitchbot==0.72.0"]
|
||||
"requirements": ["PySwitchbot==0.71.0"]
|
||||
}
|
||||
|
||||
@@ -39,11 +39,13 @@ BASE_BINARY_SENSOR_TYPES: tuple[SystemBridgeBinarySensorEntityDescription, ...]
|
||||
SystemBridgeBinarySensorEntityDescription(
|
||||
key="camera_in_use",
|
||||
translation_key="camera_in_use",
|
||||
icon="mdi:webcam",
|
||||
value_fn=camera_in_use,
|
||||
),
|
||||
SystemBridgeBinarySensorEntityDescription(
|
||||
key="pending_reboot",
|
||||
translation_key="pending_reboot",
|
||||
icon="mdi:restart",
|
||||
value_fn=lambda data: data.system.pending_reboot,
|
||||
),
|
||||
SystemBridgeBinarySensorEntityDescription(
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DATA_WAIT_TIMEOUT, DOMAIN, SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
from .const import DATA_WAIT_TIMEOUT, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -132,11 +132,7 @@ class SystemBridgeConfigFlow(
|
||||
"""Handle the initial step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
description_placeholders={
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
},
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA
|
||||
)
|
||||
|
||||
errors, info = await _async_get_info(self.hass, user_input)
|
||||
@@ -148,12 +144,7 @@ class SystemBridgeConfigFlow(
|
||||
return self.async_create_entry(title=info["hostname"], data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
},
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_authenticate(
|
||||
@@ -183,10 +174,7 @@ class SystemBridgeConfigFlow(
|
||||
return self.async_show_form(
|
||||
step_id="authenticate",
|
||||
data_schema=STEP_AUTHENTICATE_DATA_SCHEMA,
|
||||
description_placeholders={
|
||||
"name": self._name,
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL,
|
||||
},
|
||||
description_placeholders={"name": self._name},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -4,8 +4,6 @@ from typing import Final
|
||||
|
||||
from systembridgemodels.modules import Module
|
||||
|
||||
SYNTAX_KEYS_DOCUMENTATION_URL = "http://robotjs.io/docs/syntax#keys"
|
||||
|
||||
DOMAIN = "system_bridge"
|
||||
|
||||
MODULES: Final[list[Module]] = [
|
||||
|
||||
@@ -1,60 +1,4 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"camera_in_use": {
|
||||
"default": "mdi:webcam"
|
||||
},
|
||||
"pending_reboot": {
|
||||
"default": "mdi:restart"
|
||||
}
|
||||
},
|
||||
"media_player": {
|
||||
"media": {
|
||||
"default": "mdi:volume-high"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"boot_time": {
|
||||
"default": "mdi:av-timer"
|
||||
},
|
||||
"cpu_power_package": {
|
||||
"default": "mdi:chip"
|
||||
},
|
||||
"cpu_speed": {
|
||||
"default": "mdi:speedometer"
|
||||
},
|
||||
"displays_connected": {
|
||||
"default": "mdi:monitor"
|
||||
},
|
||||
"kernel": {
|
||||
"default": "mdi:devices"
|
||||
},
|
||||
"load": {
|
||||
"default": "mdi:percent"
|
||||
},
|
||||
"memory_free": {
|
||||
"default": "mdi:memory"
|
||||
},
|
||||
"memory_used": {
|
||||
"default": "mdi:memory"
|
||||
},
|
||||
"os": {
|
||||
"default": "mdi:devices"
|
||||
},
|
||||
"power_usage": {
|
||||
"default": "mdi:power-plug"
|
||||
},
|
||||
"processes": {
|
||||
"default": "mdi:counter"
|
||||
},
|
||||
"version": {
|
||||
"default": "mdi:counter"
|
||||
},
|
||||
"version_latest": {
|
||||
"default": "mdi:counter"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_process_by_id": {
|
||||
"service": "mdi:console"
|
||||
|
||||
@@ -57,6 +57,7 @@ MEDIA_PLAYER_DESCRIPTION: Final[MediaPlayerEntityDescription] = (
|
||||
MediaPlayerEntityDescription(
|
||||
key="media",
|
||||
translation_key="media",
|
||||
icon="mdi:volume-high",
|
||||
device_class=MediaPlayerDeviceClass.RECEIVER,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -233,6 +233,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
key="boot_time",
|
||||
translation_key="boot_time",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
icon="mdi:av-timer",
|
||||
value=lambda data: datetime.fromtimestamp(data.system.boot_time, tz=UTC),
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -241,6 +242,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=2,
|
||||
icon="mdi:chip",
|
||||
value=lambda data: data.cpu.power,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -250,6 +252,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfFrequency.GIGAHERTZ,
|
||||
device_class=SensorDeviceClass.FREQUENCY,
|
||||
suggested_display_precision=2,
|
||||
icon="mdi:speedometer",
|
||||
value=cpu_speed,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -275,6 +278,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
SystemBridgeSensorEntityDescription(
|
||||
key="kernel",
|
||||
translation_key="kernel",
|
||||
icon="mdi:devices",
|
||||
value=lambda data: data.system.platform,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -284,6 +288,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
suggested_display_precision=2,
|
||||
icon="mdi:memory",
|
||||
value=memory_free,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -302,17 +307,20 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
suggested_display_precision=2,
|
||||
icon="mdi:memory",
|
||||
value=memory_used,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
key="os",
|
||||
translation_key="os",
|
||||
icon="mdi:devices",
|
||||
value=lambda data: f"{data.system.platform} {data.system.platform_version}",
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
key="processes_count",
|
||||
translation_key="processes",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
icon="mdi:counter",
|
||||
value=lambda data: len(data.processes),
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -321,6 +329,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=1,
|
||||
icon="mdi:percent",
|
||||
value=lambda data: data.cpu.usage,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
@@ -330,16 +339,19 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_display_precision=2,
|
||||
icon="mdi:power-plug",
|
||||
value=lambda data: data.system.power_usage,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
key="version",
|
||||
translation_key="version",
|
||||
icon="mdi:counter",
|
||||
value=lambda data: data.system.version,
|
||||
),
|
||||
SystemBridgeSensorEntityDescription(
|
||||
key="version_latest",
|
||||
translation_key="version_latest",
|
||||
icon="mdi:counter",
|
||||
value=lambda data: data.system.version_latest,
|
||||
),
|
||||
)
|
||||
@@ -417,6 +429,7 @@ async def async_setup_entry(
|
||||
key="displays_connected",
|
||||
translation_key="displays_connected",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
icon="mdi:monitor",
|
||||
value=lambda data: len(data.displays) if data.displays else None,
|
||||
),
|
||||
entry.data[CONF_PORT],
|
||||
|
||||
@@ -194,7 +194,7 @@
|
||||
},
|
||||
"key": {
|
||||
"name": "Key",
|
||||
"description": "Key to press. List available here: {syntax_keys_documentation_url}."
|
||||
"description": "Key to press. List available here: http://robotjs.io/docs/syntax#keys."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -690,13 +690,10 @@ send_poll:
|
||||
selector:
|
||||
text:
|
||||
options:
|
||||
example: '["Option 1", "Option 2", "Option 3"]'
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
multiple: true
|
||||
object:
|
||||
is_anonymous:
|
||||
default: true
|
||||
selector:
|
||||
boolean:
|
||||
allows_multiple_answers:
|
||||
@@ -717,6 +714,10 @@ send_poll:
|
||||
min: 1
|
||||
max: 3600
|
||||
unit_of_measurement: seconds
|
||||
message_tag:
|
||||
example: "msg_to_edit"
|
||||
selector:
|
||||
text:
|
||||
reply_to_message_id:
|
||||
selector:
|
||||
number:
|
||||
|
||||
@@ -801,6 +801,10 @@
|
||||
"name": "Read timeout",
|
||||
"description": "Timeout for sending the poll in seconds."
|
||||
},
|
||||
"message_tag": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::message_tag::name%]",
|
||||
"description": "[%key:component::telegram_bot::services::send_message::fields::message_tag::description%]"
|
||||
},
|
||||
"reply_to_message_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::reply_to_message_id::name%]",
|
||||
"description": "[%key:component::telegram_bot::services::send_message::fields::reply_to_message_id::description%]"
|
||||
|
||||
@@ -258,14 +258,11 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any
|
||||
raise UpdateFailed("Received invalid data")
|
||||
|
||||
# Add all time periods together
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, None)
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0)
|
||||
for period in data.get("time_series", []):
|
||||
for key in ENERGY_HISTORY_FIELDS:
|
||||
if key in period:
|
||||
if output[key] is None:
|
||||
output[key] = period[key]
|
||||
else:
|
||||
output[key] += period[key]
|
||||
output[key] += period[key]
|
||||
|
||||
return output
|
||||
|
||||
|
||||
@@ -199,13 +199,10 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
raise UpdateFailed("Received invalid data")
|
||||
|
||||
# Add all time periods together
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, None)
|
||||
for period in data.get("time_series", []):
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0)
|
||||
for period in data["time_series"]:
|
||||
for key in ENERGY_HISTORY_FIELDS:
|
||||
if key in period:
|
||||
if output[key] is None:
|
||||
output[key] = period[key]
|
||||
else:
|
||||
output[key] += period[key]
|
||||
output[key] += period[key]
|
||||
|
||||
return output
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/teslemetry",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["tesla-fleet-api"],
|
||||
"requirements": ["tesla-fleet-api==1.2.3", "teslemetry-stream==0.7.10"]
|
||||
"requirements": ["tesla-fleet-api==1.2.3", "teslemetry-stream==0.7.9"]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/thethingsnetwork",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["ttn_client==1.2.2"]
|
||||
"requirements": ["ttn_client==1.2.0"]
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -89,8 +89,7 @@ def async_migrate_entities_unique_ids(
"""Migrate unique_ids in the entity registry after updating Uptime Kuma."""

if (
coordinator.version is None
or coordinator.version.version == coordinator.api.version.version
coordinator.version is coordinator.api.version
or int(coordinator.api.version.major) < 2
):
return
@@ -117,32 +116,6 @@ def async_migrate_entities_unique_ids(
new_unique_id=f"{registry_entry.config_entry_id}_{monitor.monitor_id!s}_{registry_entry.translation_key}",
)

# migrate device identifiers and update version
device_reg = dr.async_get(hass)
for monitor in metrics.values():
if device := device_reg.async_get_device(
{(DOMAIN, f"{coordinator.config_entry.entry_id}_{monitor.monitor_name!s}")}
):
new_identifier = {
(DOMAIN, f"{coordinator.config_entry.entry_id}_{monitor.monitor_id!s}")
}
device_reg.async_update_device(
device.id,
new_identifiers=new_identifier,
sw_version=coordinator.api.version.version,
)
if device := device_reg.async_get_device(
{(DOMAIN, f"{coordinator.config_entry.entry_id}_update")}
):
device_reg.async_update_device(
device.id,
sw_version=coordinator.api.version.version,
)

hass.async_create_task(
hass.config_entries.async_reload(coordinator.config_entry.entry_id)
)


class UptimeKumaSoftwareUpdateCoordinator(DataUpdateCoordinator[LatestRelease]):
"""Uptime Kuma coordinator for retrieving update information."""

@@ -45,6 +45,7 @@ class ViCareEntity(Entity):

self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, identifier)},
serial_number=device_serial,
name=model,
manufacturer="Viessmann",
model=model,
@@ -59,12 +60,3 @@ class ViCareEntity(Entity):
DOMAIN,
f"{gateway_serial}_zigbee_{zigbee_ieee}",
)
elif (
len(parts) == 2
and len(zigbee_ieee := device_serial.removeprefix("zigbee-")) == 16
):
self._attr_device_info["serial_number"] = "-".join(
zigbee_ieee.upper()[i : i + 2] for i in range(0, 16, 2)
)
else:
self._attr_device_info["serial_number"] = device_serial

@@ -1,7 +1,7 @@
"""Vodafone Station integration."""

from aiohttp import ClientSession, CookieJar
from aiovodafone.models import get_device_type
from aiovodafone.api import VodafoneStationCommonApi

from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
@@ -42,7 +42,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -
session = ClientSession(cookie_jar=jar)

try:
device_type, url = await get_device_type(
device_type, url = await VodafoneStationCommonApi.get_device_type(
entry.data[CONF_HOST],
session,
)
@@ -54,7 +54,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -
new_data.update(
{
CONF_DEVICE_DETAILS: {
DEVICE_TYPE: device_type.value,
DEVICE_TYPE: device_type,
DEVICE_URL: str(url),
}
},

@@ -6,7 +6,7 @@ from collections.abc import Mapping
from typing import Any

from aiovodafone import exceptions as aiovodafone_exceptions
from aiovodafone.models import get_device_type, init_device_class
from aiovodafone.api import VodafoneStationCommonApi, init_api_class
import voluptuous as vol

from homeassistant.components.device_tracker import (
@@ -54,12 +54,12 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,

session = await async_client_session(hass)

device_type, url = await get_device_type(
device_type, url = await VodafoneStationCommonApi.get_device_type(
data[CONF_HOST],
session,
)

api = init_device_class(url, device_type, data, session)
api = init_api_class(url, device_type, data, session)

try:
await api.login()
@@ -69,7 +69,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
return {
"title": data[CONF_HOST],
CONF_DEVICE_DETAILS: {
DEVICE_TYPE: device_type.value,
DEVICE_TYPE: device_type,
DEVICE_URL: str(url),
},
}

@@ -7,8 +7,7 @@ from typing import Any, cast

from aiohttp import ClientSession
from aiovodafone import exceptions
from aiovodafone.api import VodafoneStationDevice
from aiovodafone.models import init_device_class
from aiovodafone.api import VodafoneStationDevice, init_api_class
from yarl import URL

from homeassistant.components.device_tracker import (
@@ -71,7 +70,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):

data = config_entry.data

self.api = init_device_class(
self.api = init_api_class(
URL(data[CONF_DEVICE_DETAILS][DEVICE_URL]),
data[CONF_DEVICE_DETAILS][DEVICE_TYPE],
data,

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiovodafone"],
"quality_scale": "platinum",
"requirements": ["aiovodafone==3.0.0"]
"requirements": ["aiovodafone==2.0.1"]
}

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
"requirements": ["holidays==0.83"]
"requirements": ["holidays==0.82"]
}

@@ -12,10 +12,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
from .coordinator import YardianUpdateCoordinator

PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.SWITCH,
]
PLATFORMS: list[Platform] = [Platform.SWITCH]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

@@ -1,133 +0,0 @@
"""Binary sensors for Yardian integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import YardianUpdateCoordinator


@dataclass(kw_only=True, frozen=True)
class YardianBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Entity description for Yardian binary sensors."""

value_fn: Callable[[YardianUpdateCoordinator], bool | None]


def _zone_enabled_value(
coordinator: YardianUpdateCoordinator, zone_id: int
) -> bool | None:
"""Return True if zone is enabled on controller."""
try:
return coordinator.data.zones[zone_id][1] == 1
except (IndexError, TypeError):
return None


def _zone_value_factory(
zone_id: int,
) -> Callable[[YardianUpdateCoordinator], bool | None]:
"""Return a callable evaluating whether a zone is enabled."""

def value(coordinator: YardianUpdateCoordinator) -> bool | None:
return _zone_enabled_value(coordinator, zone_id)

return value


SENSOR_DESCRIPTIONS: tuple[YardianBinarySensorEntityDescription, ...] = (
YardianBinarySensorEntityDescription(
key="watering_running",
translation_key="watering_running",
device_class=BinarySensorDeviceClass.RUNNING,
value_fn=lambda coordinator: bool(coordinator.data.active_zones),
),
YardianBinarySensorEntityDescription(
key="standby",
translation_key="standby",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda coordinator: bool(
coordinator.data.oper_info.get("iStandby", 0)
),
),
YardianBinarySensorEntityDescription(
key="freeze_prevent",
translation_key="freeze_prevent",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda coordinator: bool(
coordinator.data.oper_info.get("fFreezePrevent", 0)
),
),
)


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Yardian binary sensors."""
coordinator: YardianUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]

entities: list[BinarySensorEntity] = [
YardianBinarySensor(coordinator, description)
for description in SENSOR_DESCRIPTIONS
]

zone_descriptions = [
YardianBinarySensorEntityDescription(
key=f"zone_enabled_{zone_id}",
translation_key="zone_enabled",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=_zone_value_factory(zone_id),
translation_placeholders={"zone": str(zone_id + 1)},
)
for zone_id in range(len(coordinator.data.zones))
]

entities.extend(
YardianBinarySensor(coordinator, description)
for description in zone_descriptions
)

async_add_entities(entities)


class YardianBinarySensor(
CoordinatorEntity[YardianUpdateCoordinator], BinarySensorEntity
):
"""Representation of a Yardian binary sensor based on a description."""

entity_description: YardianBinarySensorEntityDescription
_attr_has_entity_name = True

def __init__(
self,
coordinator: YardianUpdateCoordinator,
description: YardianBinarySensorEntityDescription,
) -> None:
"""Initialize the Yardian binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.yid}-{description.key}"
self._attr_device_info = coordinator.device_info

@property
def is_on(self) -> bool | None:
"""Return the current state based on the description's value function."""
return self.entity_description.value_fn(self.coordinator)
@@ -6,12 +6,15 @@ import asyncio
import datetime
import logging

from pyyardian import AsyncYardianClient, NetworkException, NotAuthorizedException
from pyyardian.typing import OperationInfo
from pyyardian import (
AsyncYardianClient,
NetworkException,
NotAuthorizedException,
YardianDeviceState,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -22,22 +25,7 @@ _LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = datetime.timedelta(seconds=30)


class YardianCombinedState:
"""Combined device state for Yardian."""

def __init__(
self,
zones: list[list],
active_zones: set[int],
oper_info: OperationInfo,
) -> None:
"""Initialize combined state with zones, active_zones and oper_info."""
self.zones = zones
self.active_zones = active_zones
self.oper_info = oper_info


class YardianUpdateCoordinator(DataUpdateCoordinator[YardianCombinedState]):
class YardianUpdateCoordinator(DataUpdateCoordinator[YardianDeviceState]):
"""Coordinator for Yardian API calls."""

config_entry: ConfigEntry
@@ -62,7 +50,6 @@ class YardianUpdateCoordinator(DataUpdateCoordinator[YardianCombinedState]):
self.yid = entry.data["yid"]
self._name = entry.title
self._model = entry.data["model"]
self._serial = entry.data.get("serialNumber")

@property
def device_info(self) -> DeviceInfo:
@@ -72,41 +59,17 @@ class YardianUpdateCoordinator(DataUpdateCoordinator[YardianCombinedState]):
identifiers={(DOMAIN, self.yid)},
manufacturer=MANUFACTURER,
model=self._model,
serial_number=self._serial,
)

async def _async_update_data(self) -> YardianCombinedState:
async def _async_update_data(self) -> YardianDeviceState:
"""Fetch data from Yardian device."""
try:
async with asyncio.timeout(10):
_LOGGER.debug(
"Fetching Yardian device state for %s (controller=%s)",
self._name,
type(self.controller).__name__,
)
# Fetch device state and operation info; specific exceptions are
# handled by the outer block to avoid double-logging.
dev_state = await self.controller.fetch_device_state()
oper_info = await self.controller.fetch_oper_info()
oper_keys = list(oper_info.keys()) if hasattr(oper_info, "keys") else []
_LOGGER.debug(
"Fetched Yardian data: zones=%s active=%s oper_keys=%s",
len(getattr(dev_state, "zones", [])),
len(getattr(dev_state, "active_zones", [])),
oper_keys,
)
return YardianCombinedState(
zones=dev_state.zones,
active_zones=dev_state.active_zones,
oper_info=oper_info,
)
return await self.controller.fetch_device_state()

except TimeoutError as e:
raise UpdateFailed("Timeout communicating with device") from e
raise UpdateFailed("Communication with Device was time out") from e
except NotAuthorizedException as e:
raise ConfigEntryError("Invalid access token") from e
raise UpdateFailed("Invalid access token") from e
except NetworkException as e:
raise UpdateFailed("Failed to communicate with device") from e
except Exception as e:  # safety net for tests to surface failure reason
_LOGGER.exception("Unexpected error while fetching Yardian data")
raise UpdateFailed(f"Unexpected error: {type(e).__name__}: {e}") from e
raise UpdateFailed("Failed to communicate with Device") from e

@@ -4,26 +4,6 @@
"switch": {
"default": "mdi:water"
}
},
"binary_sensor": {
"watering_running": {
"default": "mdi:sprinkler",
"state": {
"off": "mdi:sprinkler-variant"
}
},
"standby": {
"default": "mdi:pause-circle"
},
"freeze_prevent": {
"default": "mdi:snowflake-alert"
},
"zone_enabled": {
"default": "mdi:toggle-switch",
"state": {
"off": "mdi:toggle-switch-off"
}
}
}
},
"services": {

@@ -20,22 +20,6 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"entity": {
"binary_sensor": {
"watering_running": {
"name": "Watering running"
},
"standby": {
"name": "Standby"
},
"freeze_prevent": {
"name": "Freeze prevent"
},
"zone_enabled": {
"name": "Zone {zone} enabled"
}
}
},
"services": {
"start_irrigation": {
"name": "Start irrigation",

@@ -40,7 +40,6 @@
"step": {
"configure_addon_user": {
"data": {
"socket_path": "Socket device path",
"usb_path": "[%key:common::config_flow::data::usb_path%]"
},
"description": "Select your Z-Wave adapter",
@@ -72,7 +71,6 @@
"s2_access_control_key": "[%key:component::zwave_js::config::step::configure_security_keys::data::s2_access_control_key%]",
"s2_authenticated_key": "[%key:component::zwave_js::config::step::configure_security_keys::data::s2_authenticated_key%]",
"s2_unauthenticated_key": "[%key:component::zwave_js::config::step::configure_security_keys::data::s2_unauthenticated_key%]",
"socket_path": "[%key:component::zwave_js::config::step::configure_addon_user::data::socket_path%]",
"usb_path": "[%key:common::config_flow::data::usb_path%]"
},
"description": "[%key:component::zwave_js::config::step::configure_addon_user::description%]",
@@ -160,7 +158,6 @@
},
"choose_serial_port": {
"data": {
"socket_path": "[%key:component::zwave_js::config::step::configure_addon_user::data::socket_path%]",
"usb_path": "[%key:common::config_flow::data::usb_path%]"
},
"title": "Select your Z-Wave device"

@@ -3214,11 +3214,13 @@ class ConfigFlow(ConfigEntryBaseFlow):
) -> ConfigFlowResult:
"""Finish config flow and create a config entry."""
if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}:
raise HomeAssistantError(
f"Creates a new entry in a '{self.source}' flow, "
"when it is expected to update an existing entry and abort"
report_usage(
f"creates a new entry in a '{self.source}' flow, "
"when it is expected to update an existing entry and abort",
core_behavior=ReportBehavior.LOG,
breaks_in_ha_version="2025.11",
integration_domain=self.handler,
)

result = super().async_create_entry(
title=title,
data=data,

1
homeassistant/generated/config_flows.py
generated
@@ -202,7 +202,6 @@ FLOWS = {
"fibaro",
"file",
"filesize",
"fing",
"firefly_iii",
"fireservicerota",
"fitbit",

@@ -1984,12 +1984,6 @@
"config_flow": true,
"iot_class": "local_polling"
},
"fing": {
"name": "Fing",
"integration_type": "hub",
"config_flow": true,
"iot_class": "local_polling"
},
"fints": {
"name": "FinTS",
"integration_type": "service",

@@ -660,19 +660,27 @@ def _get_exposed_entities(

entity_entry = entity_registry.async_get(state.entity_id)
names = [state.name]
device_name = None
area_names = []

if entity_entry is not None:
names.extend(entity_entry.aliases)
device = (
device_registry.async_get(entity_entry.device_id)
if entity_entry.device_id
else None
)

if device:
device_name = device.name_by_user or device.name

if entity_entry.area_id and (
area := area_registry.async_get_area(entity_entry.area_id)
):
# Entity is in area
area_names.append(area.name)
area_names.extend(area.aliases)
elif entity_entry.device_id and (
device := device_registry.async_get(entity_entry.device_id)
):
elif device:
# Check device area
if device.area_id and (
area := area_registry.async_get_area(device.area_id)
@@ -693,6 +701,9 @@ def _get_exposed_entities(
if (parsed_utc := dt_util.parse_datetime(state.state)) is not None:
info["state"] = dt_util.as_local(parsed_utc).isoformat()

if device_name and not state.name.lower().startswith(device_name.lower()):
info["device"] = device_name

if area_names:
info["areas"] = ", ".join(area_names)

@@ -698,13 +698,7 @@ class _ScriptRun:
if cond(hass, variables) is False:
return False
except exceptions.ConditionError as ex:
self._log(
"Error in '%s[%s]' evaluation: %s",
name,
idx,
ex,
level=logging.WARNING,
)
_LOGGER.warning("Error in '%s[%s]' evaluation: %s", name, idx, ex)
return None

return True
@@ -725,11 +719,7 @@ class _ScriptRun:
await self._async_run_script(script)
return
except exceptions.ConditionError as ex:
self._log(
"Error in 'choose' evaluation:\n%s",
ex,
level=logging.WARNING,
)
_LOGGER.warning("Error in 'choose' evaluation:\n%s", ex)

if choose_data["default"] is not None:
trace_set_result(choice="default")
@@ -748,7 +738,7 @@ class _ScriptRun:
trace_element.reuse_by_child = True
check = cond(self._hass, self._variables)
except exceptions.ConditionError as ex:
self._log("Error in 'condition' evaluation:\n%s", ex, level=logging.WARNING)
_LOGGER.warning("Error in 'condition' evaluation:\n%s", ex)
check = False

self._log("Test condition %s: %s", self._script.last_action, check)
@@ -761,10 +751,13 @@ class _ScriptRun:
if_data = await self._script._async_get_if_data(self._step)  # noqa: SLF001

test_conditions: bool | None = False
with trace_path("if"):
test_conditions = self._test_conditions(
if_data["if_conditions"], "if", "condition"
)
try:
with trace_path("if"):
test_conditions = self._test_conditions(
if_data["if_conditions"], "if", "condition"
)
except exceptions.ConditionError as ex:
_LOGGER.warning("Error in 'if' evaluation:\n%s", ex)

if test_conditions:
trace_set_result(choice="then")
@@ -855,28 +848,33 @@ class _ScriptRun:
]
for iteration in itertools.count(1):
set_repeat_var(iteration)
if self._stop.done():
break
if not self._test_conditions(conditions, "while"):
try:
if self._stop.done():
break
if not self._test_conditions(conditions, "while"):
break
except exceptions.ConditionError as ex:
_LOGGER.warning("Error in 'while' evaluation:\n%s", ex)
break

if iteration > 1:
if iteration > REPEAT_WARN_ITERATIONS:
if not warned_too_many_loops:
warned_too_many_loops = True
self._log(
"While condition %s looped %s times",
_LOGGER.warning(
"While condition %s in script `%s` looped %s times",
repeat[CONF_WHILE],
self._script.name,
REPEAT_WARN_ITERATIONS,
level=logging.WARNING,
)

if iteration > REPEAT_TERMINATE_ITERATIONS:
self._log(
"While condition %s terminated because it looped %s times",
_LOGGER.critical(
"While condition %s in script `%s` "
"terminated because it looped %s times",
repeat[CONF_WHILE],
self._script.name,
REPEAT_TERMINATE_ITERATIONS,
level=logging.CRITICAL,
)
raise _AbortScript(
f"While condition {repeat[CONF_WHILE]} "
@@ -898,27 +896,32 @@ class _ScriptRun:
for iteration in itertools.count(1):
set_repeat_var(iteration)
await async_run_sequence(iteration)
if self._stop.done():
break
if self._test_conditions(conditions, "until") in [True, None]:
try:
if self._stop.done():
break
if self._test_conditions(conditions, "until") in [True, None]:
break
except exceptions.ConditionError as ex:
_LOGGER.warning("Error in 'until' evaluation:\n%s", ex)
break

if iteration >= REPEAT_WARN_ITERATIONS:
if not warned_too_many_loops:
warned_too_many_loops = True
self._log(
"Until condition %s looped %s times",
_LOGGER.warning(
"Until condition %s in script `%s` looped %s times",
repeat[CONF_UNTIL],
self._script.name,
REPEAT_WARN_ITERATIONS,
level=logging.WARNING,
)

if iteration >= REPEAT_TERMINATE_ITERATIONS:
self._log(
"Until condition %s terminated because it looped %s times",
_LOGGER.critical(
"Until condition %s in script `%s` "
"terminated because it looped %s times",
repeat[CONF_UNTIL],
self._script.name,
REPEAT_TERMINATE_ITERATIONS,
level=logging.CRITICAL,
)
raise _AbortScript(
f"Until condition {repeat[CONF_UNTIL]} "

25
requirements_all.txt
generated
@@ -84,7 +84,7 @@ PyQRCode==1.2.1
PyRMVtransport==0.3.3

# homeassistant.components.switchbot
PySwitchbot==0.72.0
PySwitchbot==0.71.0

# homeassistant.components.switchmate
PySwitchmate==0.5.1
@@ -188,7 +188,7 @@ aioairq==0.4.7
aioairzone-cloud==0.7.2

# homeassistant.components.airzone
aioairzone==1.0.2
aioairzone==1.0.1

# homeassistant.components.alexa_devices
aioamazondevices==6.4.6
@@ -432,7 +432,7 @@ aiousbwatcher==1.1.1
aiovlc==0.5.1

# homeassistant.components.vodafone_station
aiovodafone==3.0.0
aiovodafone==2.0.1

# homeassistant.components.waqi
aiowaqi==3.1.0
@@ -955,9 +955,6 @@ feedparser==6.0.12
# homeassistant.components.file
file-read-backwards==2.0.0

# homeassistant.components.fing
fing_agent_api==1.0.3

# homeassistant.components.fints
fints==3.1.0

@@ -1195,7 +1192,7 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.83
holidays==0.82

# homeassistant.components.frontend
home-assistant-frontend==20251001.4
@@ -1735,7 +1732,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.8.2
plugwise==1.8.1

# homeassistant.components.serial_pm
pmsensor==0.4
@@ -1981,7 +1978,7 @@ pydrawise==2025.9.0
pydroid-ipcam==3.0.0

# homeassistant.components.droplet
pydroplet==2.3.4
pydroplet==2.3.3

# homeassistant.components.ebox
pyebox==1.1.4
@@ -2305,10 +2302,10 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.7
pyportainer==1.0.4

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
pyprobeplus==1.1.1

# homeassistant.components.profiler
pyprof2calltree==1.4.5
@@ -2926,7 +2923,7 @@ streamlabswater==1.0.1
stringcase==1.2.0

# homeassistant.components.subaru
subarulink==0.7.15
subarulink==0.7.13

# homeassistant.components.surepetcare
surepy==0.9.0
@@ -2982,7 +2979,7 @@ tesla-powerwall==0.5.2
tesla-wall-connector==1.0.2

# homeassistant.components.teslemetry
teslemetry-stream==0.7.10
teslemetry-stream==0.7.9

# homeassistant.components.tessie
tessie-api==0.1.1
@@ -3045,7 +3042,7 @@ triggercmd==0.0.36
ttls==1.8.3

# homeassistant.components.thethingsnetwork
ttn_client==1.2.2
ttn_client==1.2.0

# homeassistant.components.tuya
tuya-device-sharing-sdk==0.2.4

25
requirements_test_all.txt
generated
@@ -81,7 +81,7 @@ PyQRCode==1.2.1
PyRMVtransport==0.3.3

# homeassistant.components.switchbot
PySwitchbot==0.72.0
PySwitchbot==0.71.0

# homeassistant.components.syncthru
PySyncThru==0.8.0
@@ -176,7 +176,7 @@ aioairq==0.4.7
aioairzone-cloud==0.7.2

# homeassistant.components.airzone
aioairzone==1.0.2
aioairzone==1.0.1

# homeassistant.components.alexa_devices
aioamazondevices==6.4.6
@@ -414,7 +414,7 @@ aiousbwatcher==1.1.1
aiovlc==0.5.1

# homeassistant.components.vodafone_station
aiovodafone==3.0.0
aiovodafone==2.0.1

# homeassistant.components.waqi
aiowaqi==3.1.0
@@ -834,9 +834,6 @@ feedparser==6.0.12
# homeassistant.components.file
file-read-backwards==2.0.0

# homeassistant.components.fing
fing_agent_api==1.0.3

# homeassistant.components.fints
fints==3.1.0

@@ -1044,7 +1041,7 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.83
holidays==0.82

# homeassistant.components.frontend
home-assistant-frontend==20251001.4
@@ -1476,7 +1473,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.8.2
plugwise==1.8.1

# homeassistant.components.poolsense
poolsense==0.0.8
@@ -1665,7 +1662,7 @@ pydrawise==2025.9.0
pydroid-ipcam==3.0.0

# homeassistant.components.droplet
pydroplet==2.3.4
pydroplet==2.3.3

# homeassistant.components.ecoforest
pyecoforest==0.4.0
@@ -1932,10 +1929,10 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.7
pyportainer==1.0.4

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
pyprobeplus==1.1.1

# homeassistant.components.profiler
pyprof2calltree==1.4.5
@@ -2433,7 +2430,7 @@ streamlabswater==1.0.1
stringcase==1.2.0

# homeassistant.components.subaru
subarulink==0.7.15
subarulink==0.7.13

# homeassistant.components.surepetcare
surepy==0.9.0
@@ -2471,7 +2468,7 @@ tesla-powerwall==0.5.2
tesla-wall-connector==1.0.2

# homeassistant.components.teslemetry
teslemetry-stream==0.7.10
teslemetry-stream==0.7.9

# homeassistant.components.tessie
tessie-api==0.1.1
@@ -2522,7 +2519,7 @@ triggercmd==0.0.36
ttls==1.8.3

# homeassistant.components.thethingsnetwork
ttn_client==1.2.2
ttn_client==1.2.0

# homeassistant.components.tuya
tuya-device-sharing-sdk==0.2.4

@@ -317,25 +317,7 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema:
translation_value_validator,
slug_validator=translation_key_validator,
),
vol.Optional("fields"): vol.Any(
# Old format:
# "key": "translation"
cv.schema_with_slug_keys(str),
# New format:
# "key": {
# "name": "translated field name",
# "description": "translated field description"
# }
cv.schema_with_slug_keys(
{
vol.Required("name"): str,
vol.Required(
"description"
): translation_value_validator,
},
slug_validator=translation_key_validator,
),
),
vol.Optional("fields"): cv.schema_with_slug_keys(str),
},
slug_validator=vol.Any("_", cv.slug),
),

@@ -8,7 +8,7 @@ cd "$(dirname "$0")/.."

# Add default vscode settings if not existing
SETTINGS_FILE=./.vscode/settings.json
SETTINGS_TEMPLATE_FILE=./.vscode/settings.default.jsonc
SETTINGS_TEMPLATE_FILE=./.vscode/settings.default.json
if [ ! -f "$SETTINGS_FILE" ]; then
echo "Copy $SETTINGS_TEMPLATE_FILE to $SETTINGS_FILE."
cp "$SETTINGS_TEMPLATE_FILE" "$SETTINGS_FILE"

@@ -328,64 +328,65 @@
'firmware': '3.31',
'full-name': 'Airzone [1] System',
'id': 1,
'masters': list([
1,
]),
'masters-slaves': dict({
'1': list([
2,
3,
4,
5,
]),
}),
'master-system-zone': '1:1',
'master-zone': 1,
'mode': 3,
'model': 'C6',
'problems': False,
'q-adapt': 0,
'slaves': list([
'modes': list([
1,
4,
2,
3,
4,
5,
]),
'problems': False,
'q-adapt': 0,
}),
'2': dict({
'available': True,
'full-name': 'Airzone [2] System',
'id': 2,
'masters': list([
'master-system-zone': '2:1',
'master-zone': 1,
'mode': 7,
'modes': list([
7,
1,
]),
'masters-slaves': dict({
'1': list([
]),
}),
'problems': False,
}),
'3': dict({
'available': True,
'full-name': 'Airzone [3] System',
'id': 3,
'masters': list([
'master-system-zone': '3:1',
'master-zone': 1,
'mode': 7,
'modes': list([
4,
2,
3,
5,
7,
1,
]),
'masters-slaves': dict({
'1': list([
]),
}),
'problems': False,
}),
'4': dict({
'available': True,
'full-name': 'Airzone [4] System',
'id': 4,
'masters': list([
'master-system-zone': '4:1',
'master-zone': 1,
'mode': 6,
'modes': list([
1,
2,
3,
4,
5,
6,
]),
'masters-slaves': dict({
'1': list([
]),
}),
'problems': False,
}),
}),

@@ -68,7 +68,6 @@ def get_fake_chromecast(info: ChromecastInfo):
mock = MagicMock(uuid=info.uuid)
mock.app_id = None
mock.media_controller.status = None
mock.is_idle = True
return mock


@@ -888,7 +887,6 @@ async def test_entity_cast_status(
assert not state.attributes.get("is_volume_muted")

chromecast.app_id = "1234"
chromecast.is_idle = False
cast_status = MagicMock()
cast_status.volume_level = 0.5
cast_status.volume_muted = False
@@ -1601,7 +1599,6 @@ async def test_entity_media_states(

# App id updated, but no media status
chromecast.app_id = app_id
chromecast.is_idle = False
cast_status = MagicMock()
cast_status_cb(cast_status)
await hass.async_block_till_done()
@@ -1644,7 +1641,6 @@ async def test_entity_media_states(

# App no longer running
chromecast.app_id = pychromecast.IDLE_APP_ID
chromecast.is_idle = True
cast_status = MagicMock()
cast_status_cb(cast_status)
await hass.async_block_till_done()
@@ -1652,7 +1648,6 @@ async def test_entity_media_states(

assert state.state == "off"

# No cast status
chromecast.app_id = None
chromecast.is_idle = False
cast_status_cb(None)
await hass.async_block_till_done()
@@ -1727,7 +1722,6 @@ async def test_entity_media_states_lovelace_app(
state = hass.states.get(entity_id)
assert state.state == "off"

chromecast.app_id = None
chromecast.is_idle = False
media_status_cb(media_status)
await hass.async_block_till_done()
Some files were not shown because too many files have changed in this diff.