Mirror of https://github.com/home-assistant/core.git (synced 2025-07-30 16:57:19 +00:00)
Merge branch 'dev' of github.com:home-assistant/core into track_entity_changes

This commit is contained in: commit 72a982baa6

.github/workflows/ci.yaml (vendored)
@@ -37,7 +37,7 @@ on:
         type: boolean

 env:
-  CACHE_VERSION: 3
+  CACHE_VERSION: 4
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
   HA_SHORT_VERSION: "2025.8"
@@ -381,6 +381,7 @@ homeassistant.components.openai_conversation.*
 homeassistant.components.openexchangerates.*
 homeassistant.components.opensky.*
 homeassistant.components.openuv.*
 homeassistant.components.opower.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
homeassistant/bootstrap.py
@@ -76,6 +76,7 @@ from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
     category_registry,
+    condition,
     config_validation as cv,
     device_registry,
     entity,
@@ -452,6 +453,7 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
         create_eager_task(restore_state.async_load(hass)),
         create_eager_task(hass.config_entries.async_initialize()),
         create_eager_task(async_get_system_info(hass)),
+        create_eager_task(condition.async_setup(hass)),
         create_eager_task(trigger.async_setup(hass)),
     )
homeassistant/components/ai_task/__init__.py
@@ -1,11 +1,12 @@
 """Integration to offer AI tasks to Home Assistant."""

 import logging
+from typing import Any

 import voluptuous as vol

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ATTR_ENTITY_ID
+from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
 from homeassistant.core import (
     HassJobType,
     HomeAssistant,
@@ -14,12 +15,15 @@ from homeassistant.core import (
     SupportsResponse,
     callback,
 )
-from homeassistant.helpers import config_validation as cv, storage
+from homeassistant.helpers import config_validation as cv, selector, storage
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

 from .const import (
+    ATTR_ATTACHMENTS,
     ATTR_INSTRUCTIONS,
+    ATTR_REQUIRED,
+    ATTR_STRUCTURE,
     ATTR_TASK_NAME,
     DATA_COMPONENT,
     DATA_PREFERENCES,
@@ -29,7 +33,7 @@ from .const import (
 )
 from .entity import AITaskEntity
 from .http import async_setup as async_setup_http
-from .task import GenDataTask, GenDataTaskResult, async_generate_data
+from .task import GenDataTask, GenDataTaskResult, PlayMediaWithId, async_generate_data

 __all__ = [
     "DOMAIN",
@@ -37,6 +41,7 @@ __all__ = [
     "AITaskEntityFeature",
     "GenDataTask",
     "GenDataTaskResult",
+    "PlayMediaWithId",
     "async_generate_data",
     "async_setup",
     "async_setup_entry",
@@ -47,6 +52,27 @@ _LOGGER = logging.getLogger(__name__)

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

+STRUCTURE_FIELD_SCHEMA = vol.Schema(
+    {
+        vol.Optional(CONF_DESCRIPTION): str,
+        vol.Optional(ATTR_REQUIRED): bool,
+        vol.Required(CONF_SELECTOR): selector.validate_selector,
+    }
+)
+
+
+def _validate_structure_fields(value: dict[str, Any]) -> vol.Schema:
+    """Validate the structure fields as a voluptuous Schema."""
+    if not isinstance(value, dict):
+        raise vol.Invalid("Structure must be a dictionary")
+    fields = {}
+    for k, v in value.items():
+        field_class = vol.Required if v.get(ATTR_REQUIRED, False) else vol.Optional
+        fields[field_class(k, description=v.get(CONF_DESCRIPTION))] = selector.selector(
+            v[CONF_SELECTOR]
+        )
+    return vol.Schema(fields, extra=vol.PREVENT_EXTRA)
+

 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Register the process service."""
@@ -64,6 +90,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
                 vol.Required(ATTR_TASK_NAME): cv.string,
                 vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
                 vol.Required(ATTR_INSTRUCTIONS): cv.string,
+                vol.Optional(ATTR_STRUCTURE): vol.All(
+                    vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
+                    _validate_structure_fields,
+                ),
+                vol.Optional(ATTR_ATTACHMENTS): vol.All(
+                    cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
+                ),
             }
         ),
         supports_response=SupportsResponse.ONLY,
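The _validate_structure_fields helper above is the heart of the new structure option: each field spec becomes a Required or Optional key whose validator is built from its selector. A minimal standalone sketch of the same field-to-schema pattern, with plain voluptuous validators standing in for Home Assistant selectors (the field specs here are illustrative, not the integration's real API):

import voluptuous as vol

# Hypothetical field specs mirroring the service's structure option.
structure = {
    "name": {"description": "Name of the user", "required": True, "validator": str},
    "age": {"description": "Age of the user", "validator": int},
}

fields = {}
for key, spec in structure.items():
    # Required when the spec says so, Optional otherwise.
    field_class = vol.Required if spec.get("required", False) else vol.Optional
    fields[field_class(key, description=spec.get("description"))] = spec["validator"]

schema = vol.Schema(fields, extra=vol.PREVENT_EXTRA)
print(schema({"name": "Paulus", "age": 42}))  # passes validation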
homeassistant/components/ai_task/const.py
@@ -21,6 +21,9 @@ SERVICE_GENERATE_DATA = "generate_data"

 ATTR_INSTRUCTIONS: Final = "instructions"
 ATTR_TASK_NAME: Final = "task_name"
+ATTR_STRUCTURE: Final = "structure"
+ATTR_REQUIRED: Final = "required"
+ATTR_ATTACHMENTS: Final = "attachments"

 DEFAULT_SYSTEM_PROMPT = (
     "You are a Home Assistant expert and help users with their tasks."
@@ -32,3 +35,6 @@ class AITaskEntityFeature(IntFlag):

     GENERATE_DATA = 1
     """Generate data based on instructions."""
+
+    SUPPORT_ATTACHMENTS = 2
+    """Support attachments with generate data."""
homeassistant/components/ai_task/manifest.json
@@ -2,7 +2,7 @@
   "domain": "ai_task",
   "name": "AI Task",
   "codeowners": ["@home-assistant/core"],
-  "dependencies": ["conversation"],
+  "dependencies": ["conversation", "media_source"],
   "documentation": "https://www.home-assistant.io/integrations/ai_task",
   "integration_type": "system",
   "quality_scale": "internal"
homeassistant/components/ai_task/services.yaml
@@ -17,3 +17,15 @@ generate_data:
           domain: ai_task
           supported_features:
            - ai_task.AITaskEntityFeature.GENERATE_DATA
+    structure:
+      advanced: true
+      required: false
+      example: '{"name": {"description": "Name of the user", "required": true, "selector": {"text": null}}, "age": {"description": "Age of the user", "selector": {"number": null}}}'
+      selector:
+        object:
+    attachments:
+      required: false
+      selector:
+        media:
+          accept:
+            - "*"
homeassistant/components/ai_task/strings.json
@@ -15,6 +15,14 @@
       "entity_id": {
         "name": "Entity ID",
         "description": "Entity ID to run the task on. If not provided, the preferred entity will be used."
       },
+      "structure": {
+        "name": "Structured output",
+        "description": "When set, the AI Task will output fields with this structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
+      },
+      "attachments": {
+        "name": "Attachments",
+        "description": "List of files to attach for multi-modal AI analysis."
+      }
     }
   }
 }
homeassistant/components/ai_task/task.py
@@ -2,21 +2,38 @@

 from __future__ import annotations

-from dataclasses import dataclass
+from dataclasses import dataclass, fields
 from typing import Any

 import voluptuous as vol

+from homeassistant.components import media_source
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError

 from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


+@dataclass(slots=True)
+class PlayMediaWithId(media_source.PlayMedia):
+    """Play media with a media content ID."""
+
+    media_content_id: str
+    """Media source ID to play."""
+
+    def __str__(self) -> str:
+        """Return media source ID as a string."""
+        return f"<PlayMediaWithId {self.media_content_id}>"
+
+
 async def async_generate_data(
     hass: HomeAssistant,
     *,
     task_name: str,
     entity_id: str | None = None,
     instructions: str,
+    structure: vol.Schema | None = None,
+    attachments: list[dict] | None = None,
 ) -> GenDataTaskResult:
     """Run a task in the AI Task integration."""
     if entity_id is None:
@@ -34,10 +51,37 @@ async def async_generate_data(
             f"AI Task entity {entity_id} does not support generating data"
         )

+    # Resolve attachments
+    resolved_attachments: list[PlayMediaWithId] | None = None
+
+    if attachments:
+        if AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features:
+            raise HomeAssistantError(
+                f"AI Task entity {entity_id} does not support attachments"
+            )
+
+        resolved_attachments = []
+
+        for attachment in attachments:
+            media = await media_source.async_resolve_media(
+                hass, attachment["media_content_id"], None
+            )
+            resolved_attachments.append(
+                PlayMediaWithId(
+                    **{
+                        field.name: getattr(media, field.name)
+                        for field in fields(media)
+                    },
+                    media_content_id=attachment["media_content_id"],
+                )
+            )
+
     return await entity.internal_async_generate_data(
         GenDataTask(
             name=task_name,
             instructions=instructions,
             structure=structure,
+            attachments=resolved_attachments,
         )
     )
@@ -52,6 +96,12 @@ class GenDataTask:
     instructions: str
     """Instructions on what needs to be done."""

+    structure: vol.Schema | None = None
+    """Optional structure for the data to be generated."""
+
+    attachments: list[PlayMediaWithId] | None = None
+    """List of attachments to go along the instructions."""
+
     def __str__(self) -> str:
         """Return task as a string."""
         return f"<GenDataTask {self.name}: {id(self)}>"
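The PlayMediaWithId construction above copies every field of the resolved PlayMedia into a subclass that carries one extra attribute. A standalone sketch of that dataclass-widening pattern, with simplified stand-in types rather than the real media_source classes:

from dataclasses import dataclass, fields

@dataclass(slots=True)
class PlayMedia:  # stand-in for media_source.PlayMedia
    url: str
    mime_type: str

@dataclass(slots=True)
class PlayMediaWithId(PlayMedia):
    media_content_id: str

media = PlayMedia(url="http://example.local/cat.jpg", mime_type="image/jpeg")
# Copy all base fields, then add the one the subclass introduces.
resolved = PlayMediaWithId(
    **{field.name: getattr(media, field.name) for field in fields(media)},
    media_content_id="media-source://camera/cat",
)
print(resolved.url, resolved.media_content_id)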
homeassistant/components/alexa_devices/manifest.json
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.2.2"]
+  "requirements": ["aioamazondevices==3.2.3"]
 }
homeassistant/components/androidtv/media_player.py
@@ -56,7 +56,7 @@ SERVICE_UPLOAD = "upload"
 ANDROIDTV_STATES = {
     "off": MediaPlayerState.OFF,
     "idle": MediaPlayerState.IDLE,
-    "standby": MediaPlayerState.STANDBY,
+    "standby": MediaPlayerState.IDLE,
     "playing": MediaPlayerState.PLAYING,
     "paused": MediaPlayerState.PAUSED,
 }
homeassistant/components/apple_tv/media_player.py
@@ -191,7 +191,7 @@ class AppleTvMediaPlayer(
             self._is_feature_available(FeatureName.PowerState)
             and self.atv.power.power_state == PowerState.Off
         ):
-            return MediaPlayerState.STANDBY
+            return MediaPlayerState.OFF
         if self._playing:
             state = self._playing.device_state
             if state in (DeviceState.Idle, DeviceState.Loading):
@@ -200,7 +200,7 @@ class AppleTvMediaPlayer(
                 return MediaPlayerState.PLAYING
             if state in (DeviceState.Paused, DeviceState.Seeking, DeviceState.Stopped):
                 return MediaPlayerState.PAUSED
-            return MediaPlayerState.STANDBY  # Bad or unknown state?
+            return MediaPlayerState.IDLE  # Bad or unknown state?
         return None

     @callback
homeassistant/components/august/manifest.json
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==3.0.0"]
 }
homeassistant/components/cambridge_audio/media_player.py
@@ -107,7 +107,7 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):
         """Return the state of the device."""
         media_state = self.client.play_state.state
         if media_state == "NETWORK":
-            return MediaPlayerState.STANDBY
+            return MediaPlayerState.OFF
         if self.client.state.power:
             if media_state == "play":
                 return MediaPlayerState.PLAYING
homeassistant/components/derivative/config_flow.py
@@ -94,6 +94,7 @@ async def _get_options_dict(handler: SchemaCommonFlowHandler | None) -> dict:
                 max=6,
                 mode=selector.NumberSelectorMode.BOX,
                 unit_of_measurement="decimals",
+                translation_key="round",
             ),
         ),
         vol.Required(CONF_TIME_WINDOW): selector.DurationSelector(),
homeassistant/components/derivative/sensor.py
@@ -198,6 +198,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
         self._attr_native_value = round(Decimal(0), round_digits)
         # List of tuples with (timestamp_start, timestamp_end, derivative)
         self._state_list: list[tuple[datetime, datetime, Decimal]] = []
+        self._last_valid_state_time: tuple[str, datetime] | None = None

         self._attr_name = name if name is not None else f"{source_entity} derivative"
         self._attr_extra_state_attributes = {ATTR_SOURCE_ID: source_entity}
@@ -242,6 +243,25 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             if (current_time - time_end).total_seconds() < self._time_window
         ]

+    def _handle_invalid_source_state(self, state: State | None) -> bool:
+        # Check the source state for unknown/unavailable condition. If unusable, write unknown/unavailable state and return false.
+        if not state or state.state == STATE_UNAVAILABLE:
+            self._attr_available = False
+            self.async_write_ha_state()
+            return False
+        if not _is_decimal_state(state.state):
+            self._attr_available = True
+            self._write_native_value(None)
+            return False
+        self._attr_available = True
+        return True
+
+    def _write_native_value(self, derivative: Decimal | None) -> None:
+        self._attr_native_value = (
+            None if derivative is None else round(derivative, self._round_digits)
+        )
+        self.async_write_ha_state()
+
     async def async_added_to_hass(self) -> None:
         """Handle entity which will be added."""
         await super().async_added_to_hass()
@@ -255,8 +275,8 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                     Decimal(restored_data.native_value),  # type: ignore[arg-type]
                     self._round_digits,
                 )
-            except SyntaxError as err:
-                _LOGGER.warning("Could not restore last state: %s", err)
+            except (InvalidOperation, TypeError):
+                self._attr_native_value = None

         def schedule_max_sub_interval_exceeded(source_state: State | None) -> None:
             """Schedule calculation using the source state and max_sub_interval.
@@ -280,9 +300,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

                 self._prune_state_list(now)
                 derivative = self._calc_derivative_from_state_list(now)
-                self._attr_native_value = round(derivative, self._round_digits)
-
-                self.async_write_ha_state()
+                self._write_native_value(derivative)

                 # If derivative is now zero, don't schedule another timeout callback, as it will have no effect
                 if derivative != 0:
@@ -299,36 +317,46 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             """Handle constant sensor state."""
             self._cancel_max_sub_interval_exceeded_callback()
+            new_state = event.data["new_state"]
+            if not self._handle_invalid_source_state(new_state):
+                return
+
+            assert new_state
             if self._attr_native_value == Decimal(0):
                 # If the derivative is zero, and the source sensor hasn't
                 # changed state, then we know it will still be zero.
                 return
             schedule_max_sub_interval_exceeded(new_state)
-            new_state = event.data["new_state"]
-            if new_state is not None:
-                calc_derivative(
-                    new_state, new_state.state, event.data["old_last_reported"]
-                )
+            calc_derivative(new_state, new_state.state, event.data["old_last_reported"])

         @callback
         def on_state_changed(event: Event[EventStateChangedData]) -> None:
             """Handle changed sensor state."""
             self._cancel_max_sub_interval_exceeded_callback()
+            new_state = event.data["new_state"]
+            if not self._handle_invalid_source_state(new_state):
+                return
+
+            assert new_state
+            schedule_max_sub_interval_exceeded(new_state)
             old_state = event.data["old_state"]
-            if new_state is not None and old_state is not None:
+            if old_state is not None:
                 calc_derivative(new_state, old_state.state, old_state.last_reported)
+            else:
+                # On first state change from none, update availability
+                self.async_write_ha_state()

         def calc_derivative(
             new_state: State, old_value: str, old_last_reported: datetime
         ) -> None:
             """Handle the sensor state changes."""
-            if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
-                STATE_UNKNOWN,
-                STATE_UNAVAILABLE,
-            ):
-                return
             if not _is_decimal_state(old_value):
+                if self._last_valid_state_time:
+                    old_value = self._last_valid_state_time[0]
+                    old_last_reported = self._last_valid_state_time[1]
+                else:
+                    # Sensor becomes valid for the first time, just keep the restored value
+                    self.async_write_ha_state()
+                    return

             if self.native_unit_of_measurement is None:
                 unit = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
@@ -373,6 +401,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             self._state_list.append(
                 (old_last_reported, new_state.last_reported, new_derivative)
             )
+            self._last_valid_state_time = (
+                new_state.state,
+                new_state.last_reported,
+            )

             # If outside of time window just report derivative (is the same as modeling it in the window),
             # otherwise take the weighted average with the previous derivatives
@@ -382,11 +414,16 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 derivative = self._calc_derivative_from_state_list(
                     new_state.last_reported
                 )
-            self._attr_native_value = round(derivative, self._round_digits)
-            self.async_write_ha_state()
+            self._write_native_value(derivative)

+        source_state = self.hass.states.get(self._sensor_source_id)
+        if source_state is None or source_state.state in [
+            STATE_UNAVAILABLE,
+            STATE_UNKNOWN,
+        ]:
+            self._attr_available = False

         if self._max_sub_interval is not None:
             source_state = self.hass.states.get(self._sensor_source_id)
             schedule_max_sub_interval_exceeded(source_state)

     @callback
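The _handle_invalid_source_state helper above encodes three availability rules: an unavailable source makes the derivative sensor unavailable, a non-numeric source keeps it available but writes an unknown value, and only numeric states trigger a recalculation. A small sketch of the same decision, using a plain Decimal check in place of the integration's _is_decimal_state:

from decimal import Decimal, InvalidOperation

def is_decimal_state(state: str) -> bool:
    try:
        Decimal(state)
    except InvalidOperation:
        return False
    return True

for source in ("unavailable", "not_a_number", "3.14"):
    if source == "unavailable":
        print(f"{source!r} -> sensor unavailable")
    elif not is_decimal_state(source):
        print(f"{source!r} -> sensor available, value unknown")
    else:
        print(f"{source!r} -> derivative recalculated")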
homeassistant/components/derivative/strings.json
@@ -52,6 +52,11 @@
           "h": "Hours",
           "d": "Days"
         }
       },
+      "round": {
+        "unit_of_measurement": {
+          "decimals": "decimals"
+        }
+      }
     }
   }
 }
homeassistant/components/devolo_home_control/entity.py
@@ -87,7 +87,22 @@ class DevoloDeviceEntity(Entity):
             self._value = message[1]
         elif len(message) == 3 and message[2] == "status":
             # Maybe the API wants to tell us, that the device went on- or offline.
-            self._attr_available = self._device_instance.is_online()
+            state = self._device_instance.is_online()
+            if state != self.available and not state:
+                _LOGGER.info(
+                    "Device %s is unavailable",
+                    self._device_instance.settings_property[
+                        "general_device_settings"
+                    ].name,
+                )
+            if state != self.available and state:
+                _LOGGER.info(
+                    "Device %s is back online",
+                    self._device_instance.settings_property[
+                        "general_device_settings"
+                    ].name,
+                )
+            self._attr_available = state
         elif message[1] == "del" and self.platform.config_entry:
             device_registry = dr.async_get(self.hass)
             device = device_registry.async_get_device(
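The change above logs only on an actual availability transition rather than on every status message. A compact sketch of that transition-only logging, with a plain function standing in for the entity method:

import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger("devolo_home_control")

def on_status(name: str, available: bool, is_online: bool) -> bool:
    # Log only when the availability flag actually flips.
    if is_online != available and not is_online:
        _LOGGER.info("Device %s is unavailable", name)
    if is_online != available and is_online:
        _LOGGER.info("Device %s is back online", name)
    return is_online

available = on_status("Wall plug", True, False)       # logs "unavailable"
available = on_status("Wall plug", available, False)  # quiet, still offline
available = on_status("Wall plug", available, True)   # logs "back online"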
homeassistant/components/devolo_home_network/coordinator.py
@@ -207,7 +207,7 @@ class DevoloUptimeGetCoordinator(DevoloDataUpdateCoordinator[int]):


 class DevoloWifiConnectedStationsGetCoordinator(
-    DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]
+    DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]]
 ):
     """Class to manage fetching data from the WifiGuestAccessGet endpoint."""

@@ -230,10 +230,11 @@ class DevoloWifiConnectedStationsGetCoordinator(
         )
         self.update_method = self.async_get_wifi_connected_station

-    async def async_get_wifi_connected_station(self) -> list[ConnectedStationInfo]:
+    async def async_get_wifi_connected_station(self) -> dict[str, ConnectedStationInfo]:
         """Fetch data from API endpoint."""
         assert self.device.device
-        return await self.device.device.async_get_wifi_connected_station()
+        clients = await self.device.device.async_get_wifi_connected_station()
+        return {client.mac_address: client for client in clients}


 class DevoloWifiGuestAccessGetCoordinator(
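Keying the coordinator data by MAC address turns the device tracker's linear scans into constant-time dictionary lookups. A sketch of the same list-to-dict change, with a simplified stand-in for the devolo_plc_api type:

from dataclasses import dataclass

@dataclass
class ConnectedStationInfo:  # stand-in for the library type
    mac_address: str

clients = [
    ConnectedStationInfo("AA:BB:CC:DD:EE:FF"),
    ConnectedStationInfo("11:22:33:44:55:66"),
]
# The coordinator now stores this mapping instead of the raw list.
by_mac = {client.mac_address: client for client in clients}

print(by_mac.get("AA:BB:CC:DD:EE:FF") is not None)  # True: is_connected
print(by_mac.get("00:00:00:00:00:01") is not None)  # False: unknown client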
homeassistant/components/devolo_home_network/device_tracker.py
@@ -28,9 +28,9 @@ async def async_setup_entry(
 ) -> None:
     """Get all devices and sensors and setup them via config entry."""
     device = entry.runtime_data.device
-    coordinators: dict[str, DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]] = (
-        entry.runtime_data.coordinators
-    )
+    coordinators: dict[
+        str, DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]]
+    ] = entry.runtime_data.coordinators
     registry = er.async_get(hass)
     tracked = set()

@@ -38,16 +38,16 @@ async def async_setup_entry(
     def new_device_callback() -> None:
         """Add new devices if needed."""
         new_entities = []
-        for station in coordinators[CONNECTED_WIFI_CLIENTS].data:
-            if station.mac_address in tracked:
+        for mac_address in coordinators[CONNECTED_WIFI_CLIENTS].data:
+            if mac_address in tracked:
                 continue

             new_entities.append(
                 DevoloScannerEntity(
-                    coordinators[CONNECTED_WIFI_CLIENTS], device, station.mac_address
+                    coordinators[CONNECTED_WIFI_CLIENTS], device, mac_address
                 )
             )
-            tracked.add(station.mac_address)
+            tracked.add(mac_address)
         async_add_entities(new_entities)

     @callback
@@ -82,7 +82,7 @@ async def async_setup_entry(

 # The pylint disable is needed because of https://github.com/pylint-dev/pylint/issues/9138
 class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module
-    CoordinatorEntity[DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]],
+    CoordinatorEntity[DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]]],
     ScannerEntity,
 ):
     """Representation of a devolo device tracker."""
@@ -92,7 +92,7 @@ class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module

     def __init__(
         self,
-        coordinator: DevoloDataUpdateCoordinator[list[ConnectedStationInfo]],
+        coordinator: DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]],
         device: Device,
         mac: str,
     ) -> None:
@@ -109,14 +109,8 @@ class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module
         if not self.coordinator.data:
             return {}

-        station = next(
-            (
-                station
-                for station in self.coordinator.data
-                if station.mac_address == self.mac_address
-            ),
-            None,
-        )
+        assert self.mac_address
+        station = self.coordinator.data.get(self.mac_address)
         if station:
             attrs["wifi"] = WIFI_APTYPE.get(station.vap_type, STATE_UNKNOWN)
             attrs["band"] = (
@@ -129,11 +123,8 @@ class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module
     @property
     def is_connected(self) -> bool:
         """Return true if the device is connected to the network."""
-        return any(
-            station
-            for station in self.coordinator.data
-            if station.mac_address == self.mac_address
-        )
+        assert self.mac_address
+        return self.coordinator.data.get(self.mac_address) is not None

     @property
     def unique_id(self) -> str:
homeassistant/components/devolo_home_network/entity.py
@@ -21,7 +21,7 @@ from .coordinator import DevoloDataUpdateCoordinator, DevoloHomeNetworkConfigEntry
 type _DataType = (
     LogicalNetwork
     | DataRate
-    | list[ConnectedStationInfo]
+    | dict[str, ConnectedStationInfo]
     | list[NeighborAPInfo]
     | WifiGuestAccessGet
     | bool
homeassistant/components/devolo_home_network/sensor.py
@@ -47,7 +47,11 @@ def _last_restart(runtime: int) -> datetime:


 type _CoordinatorDataType = (
-    LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo] | int
+    LogicalNetwork
+    | DataRate
+    | dict[str, ConnectedStationInfo]
+    | list[NeighborAPInfo]
+    | int
 )
 type _SensorDataType = int | float | datetime

@@ -79,7 +83,7 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any, Any]] = {
         ),
     ),
     CONNECTED_WIFI_CLIENTS: DevoloSensorEntityDescription[
-        list[ConnectedStationInfo], int
+        dict[str, ConnectedStationInfo], int
     ](
         key=CONNECTED_WIFI_CLIENTS,
         state_class=SensorStateClass.MEASUREMENT,
homeassistant/components/dnsip/config_flow.py
@@ -172,6 +172,9 @@ class DnsIPOptionsFlowHandler(OptionsFlow):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Manage the options."""
+        if self.config_entry.data[CONF_HOSTNAME] == DEFAULT_HOSTNAME:
+            return self.async_abort(reason="no_options")
+
         errors = {}
         if user_input is not None:
             resolver = user_input.get(CONF_RESOLVER, DEFAULT_RESOLVER)
homeassistant/components/dnsip/strings.json
@@ -30,7 +30,8 @@
       }
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
+      "no_options": "The myip hostname requires the default resolvers and therefore cannot be configured."
     },
     "error": {
       "invalid_resolver": "Invalid IP address or port for resolver"
homeassistant/components/downloader/__init__.py
@@ -65,12 +65,10 @@ def download_file(service: ServiceCall) -> None:

             else:
                 if filename is None and "content-disposition" in req.headers:
-                    match = re.findall(
+                    if match := re.search(
                         r"filename=(\S+)", req.headers["content-disposition"]
-                    )
-
-                    if match:
-                        filename = match[0].strip("'\" ")
+                    ):
+                        filename = match.group(1).strip("'\" ")

                 if not filename:
                     filename = os.path.basename(url).strip()
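The change above swaps re.findall for re.search with the walrus operator, binding only the first filename match and dropping the separate truthiness check. A short sketch of the pattern:

import re

header = 'attachment; filename="report.pdf"'
# One expression both searches and binds; the body runs only on a match.
if match := re.search(r"filename=(\S+)", header):
    print(match.group(1).strip("'\" "))  # report.pdf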
homeassistant/components/dwd_weather_warnings/strings.json
@@ -2,7 +2,7 @@
   "config": {
     "step": {
       "user": {
-        "description": "To identify the desired region, either the warncell ID / name or device tracker is required. The provided device tracker has to contain the attributes 'latitude' and 'longitude'.",
+        "description": "To identify the desired region, either the warncell ID / name or device tracker is required. The provided device tracker has to contain the attributes 'Latitude' and 'Longitude'.",
         "data": {
           "region_identifier": "Warncell ID or name",
           "region_device_tracker": "Device tracker entity"
@@ -14,7 +14,7 @@
       "ambiguous_identifier": "The region identifier and device tracker can not be specified together.",
       "invalid_identifier": "The specified region identifier / device tracker is invalid.",
       "entity_not_found": "The specified device tracker entity was not found.",
-      "attribute_not_found": "The required `latitude` or `longitude` attribute was not found in the specified device tracker."
+      "attribute_not_found": "The required attributes 'Latitude' and 'Longitude' were not found in the specified device tracker."
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
homeassistant/components/eheimdigital/config_flow.py
@@ -10,7 +10,12 @@ from eheimdigital.device import EheimDigitalDevice
 from eheimdigital.hub import EheimDigitalHub
 import voluptuous as vol

-from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import (
+    SOURCE_RECONFIGURE,
+    SOURCE_USER,
+    ConfigFlow,
+    ConfigFlowResult,
+)
 from homeassistant.const import CONF_HOST
 from homeassistant.helpers import selector
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -126,3 +131,52 @@ class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN):
             data_schema=CONFIG_SCHEMA,
             errors=errors,
         )
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration of the config entry."""
+        if user_input is None:
+            return self.async_show_form(
+                step_id=SOURCE_RECONFIGURE, data_schema=CONFIG_SCHEMA
+            )
+
+        self._async_abort_entries_match(user_input)
+        errors: dict[str, str] = {}
+        hub = EheimDigitalHub(
+            host=user_input[CONF_HOST],
+            session=async_get_clientsession(self.hass),
+            loop=self.hass.loop,
+            main_device_added_event=self.main_device_added_event,
+        )
+
+        try:
+            await hub.connect()
+
+            async with asyncio.timeout(2):
+                # This event gets triggered when the first message is received from
+                # the device, it contains the data necessary to create the main device.
+                # This removes the race condition where the main device is accessed
+                # before the response from the device is parsed.
+                await self.main_device_added_event.wait()
+                if TYPE_CHECKING:
+                    # At this point the main device is always set
+                    assert isinstance(hub.main, EheimDigitalDevice)
+                await self.async_set_unique_id(hub.main.mac_address)
+            await hub.close()
+        except (ClientError, TimeoutError):
+            errors["base"] = "cannot_connect"
+        except Exception:  # noqa: BLE001
+            errors["base"] = "unknown"
+            LOGGER.exception("Unknown exception occurred")
+        else:
+            self._abort_if_unique_id_mismatch()
+            return self.async_update_reload_and_abort(
+                self._get_reconfigure_entry(),
+                data_updates=user_input,
+            )
+        return self.async_show_form(
+            step_id=SOURCE_RECONFIGURE,
+            data_schema=CONFIG_SCHEMA,
+            errors=errors,
+        )
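The connect-and-wait step in the reconfigure flow above gates access to the main device on an asyncio.Event that is set by the first device message, bounded by asyncio.timeout. A standalone sketch of that synchronization, with a fake message source in place of the real hub:

import asyncio

async def main() -> None:
    main_device_added = asyncio.Event()

    async def fake_first_message() -> None:
        # Stand-in for the hub parsing its first device message.
        await asyncio.sleep(0.01)
        main_device_added.set()

    task = asyncio.create_task(fake_first_message())
    async with asyncio.timeout(2):  # TimeoutError here maps to "cannot_connect"
        await main_device_added.wait()
    print("main device available")
    await task

asyncio.run(main())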
homeassistant/components/eheimdigital/quality_scale.yaml
@@ -60,7 +60,7 @@ rules:
   entity-translations: done
   exception-translations: done
   icon-translations: todo
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues: todo
   stale-devices: done
homeassistant/components/eheimdigital/strings.json
@@ -4,6 +4,14 @@
       "discovery_confirm": {
         "description": "[%key:common::config_flow::description::confirm_setup%]"
       },
+      "reconfigure": {
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]"
+        },
+        "data_description": {
+          "host": "[%key:component::eheimdigital::config::step::user::data_description::host%]"
+        }
+      },
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]"
@@ -15,7 +23,9 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
+      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
+      "unique_id_mismatch": "The identifier does not match the previous identifier"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
homeassistant/components/enphase_envoy/binary_sensor.py
@@ -126,6 +126,7 @@ class EnvoyEnchargeBinarySensorEntity(EnvoyBaseBinarySensorEntity):
             name=f"Encharge {serial_number}",
             sw_version=str(encharge_inventory[self._serial_number].firmware_version),
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=serial_number,
         )

     @property
@@ -158,6 +159,7 @@ class EnvoyEnpowerBinarySensorEntity(EnvoyBaseBinarySensorEntity):
             name=f"Enpower {enpower.serial_number}",
             sw_version=str(enpower.firmware_version),
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=enpower.serial_number,
         )

     @property
homeassistant/components/enphase_envoy/coordinator.py
@@ -220,6 +220,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         await envoy.setup()
         assert envoy.serial_number is not None
         self.envoy_serial_number = envoy.serial_number
+        _LOGGER.debug("Envoy setup complete for serial: %s", self.envoy_serial_number)
         if token := self.config_entry.data.get(CONF_TOKEN):
             with contextlib.suppress(*INVALID_AUTH_ERRORS):
                 # Always set the username and password
@@ -227,6 +228,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                 await envoy.authenticate(
                     username=self.username, password=self.password, token=token
                 )
+                _LOGGER.debug("Authorized, validating token lifetime")
                 # The token is valid, but we still want
                 # to refresh it if it's stale right away
                 self._async_refresh_token_if_needed(dt_util.utcnow())
@@ -234,6 +236,8 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             # token likely expired or firmware changed
             # so we fall through to authenticate with
             # username/password
+            _LOGGER.debug("setup and auth got INVALID_AUTH_ERRORS")
+        _LOGGER.debug("Authenticate with username/password only")
         await self.envoy.authenticate(username=self.username, password=self.password)
         # Password auth succeeded, so we can update the token
         # if we are using EnvoyTokenAuth
@@ -262,13 +266,16 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         for tries in range(2):
             try:
                 if not self._setup_complete:
+                    _LOGGER.debug("update on try %s, setup not complete", tries)
                     await self._async_setup_and_authenticate()
                     self._async_mark_setup_complete()
                 # dump all received data in debug mode to assist troubleshooting
                 envoy_data = await envoy.update()
             except INVALID_AUTH_ERRORS as err:
+                _LOGGER.debug("update on try %s, INVALID_AUTH_ERRORS %s", tries, err)
                 if self._setup_complete and tries == 0:
                     # token likely expired or firmware changed, try to re-authenticate
+                    _LOGGER.debug("update on try %s, setup was complete, retry", tries)
                     self._setup_complete = False
                     continue
                 raise ConfigEntryAuthFailed(
@@ -280,6 +287,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                     },
                 ) from err
             except EnvoyError as err:
+                _LOGGER.debug("update on try %s, EnvoyError %s", tries, err)
                 raise UpdateFailed(
                     translation_domain=DOMAIN,
                     translation_key="envoy_error",
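The debug lines above instrument a two-attempt update loop: the first auth failure after a completed setup forces re-setup and exactly one retry, and any further failure propagates. A sketch of that retry shape, with a dummy exception in place of the pyenphase error types:

class AuthError(Exception):
    pass

def update(setup_complete: bool, token_expired: bool) -> str:
    for tries in range(2):
        try:
            if token_expired and tries == 0:
                raise AuthError("token expired")
            return f"envoy data (try {tries})"
        except AuthError:
            if setup_complete and tries == 0:
                setup_complete = False  # re-authenticate on the next pass
                continue
            raise
    raise RuntimeError("unreachable")

print(update(setup_complete=True, token_expired=True))  # envoy data (try 1)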
homeassistant/components/enphase_envoy/manifest.json
@@ -7,7 +7,7 @@
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
   "quality_scale": "platinum",
-  "requirements": ["pyenphase==2.2.0"],
+  "requirements": ["pyenphase==2.2.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
homeassistant/components/enphase_envoy/number.py
@@ -165,6 +165,7 @@ class EnvoyStorageSettingsNumberEntity(EnvoyBaseEntity, NumberEntity):
                 name=f"Enpower {self._serial_number}",
                 sw_version=str(enpower.firmware_version),
                 via_device=(DOMAIN, self.envoy_serial_num),
+                serial_number=self._serial_number,
             )
         else:
             # If no enpower device assign numbers to Envoy itself

homeassistant/components/enphase_envoy/select.py
@@ -223,6 +223,7 @@ class EnvoyStorageSettingsSelectEntity(EnvoyBaseEntity, SelectEntity):
                 name=f"Enpower {self._serial_number}",
                 sw_version=str(enpower.firmware_version),
                 via_device=(DOMAIN, self.envoy_serial_num),
+                serial_number=self._serial_number,
             )
         else:
             # If no enpower device assign selects to Envoy itself
homeassistant/components/enphase_envoy/sensor.py
@@ -1313,6 +1313,7 @@ class EnvoyInverterEntity(EnvoySensorBaseEntity):
             manufacturer="Enphase",
             model="Inverter",
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=serial_number,
         )

     @property
@@ -1356,6 +1357,7 @@ class EnvoyEnchargeEntity(EnvoySensorBaseEntity):
             name=f"Encharge {serial_number}",
             sw_version=str(encharge_inventory[self._serial_number].firmware_version),
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=serial_number,
         )


@@ -1420,6 +1422,7 @@ class EnvoyEnpowerEntity(EnvoySensorBaseEntity):
             name=f"Enpower {enpower_data.serial_number}",
             sw_version=str(enpower_data.firmware_version),
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=enpower_data.serial_number,
         )

     @property
homeassistant/components/enphase_envoy/switch.py
@@ -138,6 +138,7 @@ class EnvoyEnpowerSwitchEntity(EnvoyBaseEntity, SwitchEntity):
             name=f"Enpower {self._serial_number}",
             sw_version=str(enpower.firmware_version),
             via_device=(DOMAIN, self.envoy_serial_num),
+            serial_number=self._serial_number,
         )

     @property
@@ -235,6 +236,7 @@ class EnvoyStorageSettingsSwitchEntity(EnvoyBaseEntity, SwitchEntity):
                 name=f"Enpower {self._serial_number}",
                 sw_version=str(enpower.firmware_version),
                 via_device=(DOMAIN, self.envoy_serial_num),
+                serial_number=self._serial_number,
             )
         else:
             # If no enpower device assign switches to Envoy itself
homeassistant/components/frontend/manifest.json
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250702.0"]
+  "requirements": ["home-assistant-frontend==20250702.1"]
 }
homeassistant/components/gios/manifest.json
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["dacite", "gios"],
-  "requirements": ["gios==6.0.0"]
+  "requirements": ["gios==6.1.0"]
 }
homeassistant/components/google_assistant_sdk/application_credentials.py
@@ -2,6 +2,10 @@

 from homeassistant.components.application_credentials import AuthorizationServer
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers.config_entry_oauth2_flow import (
+    AUTH_CALLBACK_PATH,
+    MY_AUTH_CALLBACK_PATH,
+)


 async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
@@ -14,12 +18,14 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:

 async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
     """Return description placeholders for the credentials dialog."""
+    if "my" in hass.config.components:
+        redirect_url = MY_AUTH_CALLBACK_PATH
+    else:
+        ha_host = hass.config.external_url or "https://YOUR_DOMAIN:PORT"
+        redirect_url = f"{ha_host}{AUTH_CALLBACK_PATH}"
     return {
-        "oauth_consent_url": (
-            "https://console.cloud.google.com/apis/credentials/consent"
-        ),
-        "more_info_url": (
-            "https://www.home-assistant.io/integrations/google_assistant_sdk/"
-        ),
+        "oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent",
+        "more_info_url": "https://www.home-assistant.io/integrations/google_assistant_sdk/",
         "oauth_creds_url": "https://console.cloud.google.com/apis/credentials",
+        "redirect_url": redirect_url,
     }
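The placeholder logic above prefers the My Home Assistant callback when the "my" component is loaded and otherwise builds a callback from the external URL. A sketch of that selection with placeholder values standing in for the live hass config (the constant values below are illustrative):

AUTH_CALLBACK_PATH = "/auth/external/callback"
MY_AUTH_CALLBACK_PATH = "https://my.home-assistant.io/redirect/oauth"

loaded_components = {"frontend", "my"}
external_url = None  # stand-in for hass.config.external_url

if "my" in loaded_components:
    redirect_url = MY_AUTH_CALLBACK_PATH
else:
    ha_host = external_url or "https://YOUR_DOMAIN:PORT"
    redirect_url = f"{ha_host}{AUTH_CALLBACK_PATH}"
print(redirect_url)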
homeassistant/components/google_assistant_sdk/helpers.py
@@ -80,10 +80,10 @@ async def async_send_text_commands(

     credentials = Credentials(session.token[CONF_ACCESS_TOKEN])  # type: ignore[no-untyped-call]
     language_code = entry.options.get(CONF_LANGUAGE_CODE, default_language_code(hass))
+    command_response_list = []
     with TextAssistant(
         credentials, language_code, audio_out=bool(media_players)
     ) as assistant:
-        command_response_list = []
         for command in commands:
             try:
                 resp = await hass.async_add_executor_job(assistant.assist, command)
@@ -117,7 +117,7 @@ async def async_send_text_commands(
                     blocking=True,
                 )
             command_response_list.append(CommandResponse(text_response))
-        return command_response_list
+    return command_response_list


 def default_language_code(hass: HomeAssistant) -> str:
homeassistant/components/google_assistant_sdk/manifest.json
@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["gassist-text==0.0.12"],
+  "requirements": ["gassist-text==0.0.14"],
   "single_config_entry": true
 }
homeassistant/components/google_assistant_sdk/strings.json
@@ -46,7 +46,7 @@
     }
   },
   "application_credentials": {
-    "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Assistant SDK. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type."
+    "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Assistant SDK. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n1. Add `{redirect_url}` under *Authorized redirect URI*."
   },
   "services": {
     "send_text_command": {
homeassistant/components/google_generative_ai_conversation/__init__.py
@@ -2,14 +2,12 @@

 from __future__ import annotations

-import asyncio
-import mimetypes
+from functools import partial
 from pathlib import Path
 from types import MappingProxyType

 from google.genai import Client
 from google.genai.errors import APIError, ClientError
-from google.genai.types import File, FileState
 from requests.exceptions import Timeout
 import voluptuous as vol

@@ -37,15 +35,17 @@ from homeassistant.helpers.typing import ConfigType

 from .const import (
     CONF_PROMPT,
+    DEFAULT_AI_TASK_NAME,
     DEFAULT_TITLE,
     DEFAULT_TTS_NAME,
     DOMAIN,
-    FILE_POLLING_INTERVAL_SECONDS,
     LOGGER,
+    RECOMMENDED_AI_TASK_OPTIONS,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_TTS_OPTIONS,
     TIMEOUT_MILLIS,
 )
+from .entity import async_prepare_files_for_prompt

 SERVICE_GENERATE_CONTENT = "generate_content"
 CONF_IMAGE_FILENAME = "image_filename"
@@ -53,6 +53,7 @@ CONF_FILENAMES = "filenames"

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 PLATFORMS = (
+    Platform.AI_TASK,
     Platform.CONVERSATION,
     Platform.TTS,
 )
@@ -88,58 +89,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

         client = config_entry.runtime_data

-        def append_files_to_prompt():
-            image_filenames = call.data[CONF_IMAGE_FILENAME]
-            filenames = call.data[CONF_FILENAMES]
-            for filename in set(image_filenames + filenames):
+        files = call.data[CONF_IMAGE_FILENAME] + call.data[CONF_FILENAMES]
+
+        if files:
+            for filename in files:
                 if not hass.config.is_allowed_path(filename):
                     raise HomeAssistantError(
                         f"Cannot read `{filename}`, no access to path; "
                         "`allowlist_external_dirs` may need to be adjusted in "
                         "`configuration.yaml`"
                     )
-                if not Path(filename).exists():
-                    raise HomeAssistantError(f"`{filename}` does not exist")
-                mimetype = mimetypes.guess_type(filename)[0]
-                with open(filename, "rb") as file:
-                    uploaded_file = client.files.upload(
-                        file=file, config={"mime_type": mimetype}
-                    )
-                    prompt_parts.append(uploaded_file)
-
-        async def wait_for_file_processing(uploaded_file: File) -> None:
-            """Wait for file processing to complete."""
-            while True:
-                uploaded_file = await client.aio.files.get(
-                    name=uploaded_file.name,
-                    config={"http_options": {"timeout": TIMEOUT_MILLIS}},
-                )
-                if uploaded_file.state not in (
-                    FileState.STATE_UNSPECIFIED,
-                    FileState.PROCESSING,
-                ):
-                    break
-                LOGGER.debug(
-                    "Waiting for file `%s` to be processed, current state: %s",
-                    uploaded_file.name,
-                    uploaded_file.state,
-                )
-                await asyncio.sleep(FILE_POLLING_INTERVAL_SECONDS)
-
-            if uploaded_file.state == FileState.FAILED:
-                raise HomeAssistantError(
-                    f"File `{uploaded_file.name}` processing failed, reason: {uploaded_file.error.message}"
-                )
-
-        await hass.async_add_executor_job(append_files_to_prompt)
-
-        tasks = [
-            asyncio.create_task(wait_for_file_processing(part))
-            for part in prompt_parts
-            if isinstance(part, File) and part.state != FileState.ACTIVE
-        ]
-        async with asyncio.timeout(TIMEOUT_MILLIS / 1000):
-            await asyncio.gather(*tasks)
+            prompt_parts.extend(
+                await async_prepare_files_for_prompt(
+                    hass, client, [Path(filename) for filename in files]
+                )
+            )

         try:
             response = await client.aio.models.generate_content(
@@ -187,11 +152,9 @@ async def async_setup_entry(
     """Set up Google Generative AI Conversation from a config entry."""

     try:
-
-        def _init_client() -> Client:
-            return Client(api_key=entry.data[CONF_API_KEY])
-
-        client = await hass.async_add_executor_job(_init_client)
+        client = await hass.async_add_executor_job(
+            partial(Client, api_key=entry.data[CONF_API_KEY])
+        )
         await client.aio.models.get(
             model=RECOMMENDED_CHAT_MODEL,
             config={"http_options": {"timeout": TIMEOUT_MILLIS}},
@@ -350,6 +313,19 @@ async def async_migrate_entry(

         hass.config_entries.async_update_entry(entry, minor_version=2)

+    if entry.version == 2 and entry.minor_version == 2:
+        # Add AI Task subentry with default options
+        hass.config_entries.async_add_subentry(
+            entry,
+            ConfigSubentry(
+                data=MappingProxyType(RECOMMENDED_AI_TASK_OPTIONS),
+                subentry_type="ai_task_data",
+                title=DEFAULT_AI_TASK_NAME,
+                unique_id=None,
+            ),
+        )
+        hass.config_entries.async_update_entry(entry, minor_version=3)
+
     LOGGER.debug(
         "Migration to version %s:%s successful", entry.version, entry.minor_version
     )
homeassistant/components/google_generative_ai_conversation/ai_task.py (new file)
@@ -0,0 +1,78 @@
"""AI Task integration for Google Generative AI Conversation."""

from __future__ import annotations

from json import JSONDecodeError

from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from .const import LOGGER
from .entity import ERROR_GETTING_RESPONSE, GoogleGenerativeAILLMBaseEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up AI Task entities."""
    for subentry in config_entry.subentries.values():
        if subentry.subentry_type != "ai_task_data":
            continue

        async_add_entities(
            [GoogleGenerativeAITaskEntity(config_entry, subentry)],
            config_subentry_id=subentry.subentry_id,
        )


class GoogleGenerativeAITaskEntity(
    ai_task.AITaskEntity,
    GoogleGenerativeAILLMBaseEntity,
):
    """Google Generative AI AI Task entity."""

    _attr_supported_features = ai_task.AITaskEntityFeature.GENERATE_DATA

    async def _async_generate_data(
        self,
        task: ai_task.GenDataTask,
        chat_log: conversation.ChatLog,
    ) -> ai_task.GenDataTaskResult:
        """Handle a generate data task."""
        await self._async_handle_chat_log(chat_log, task.structure)

        if not isinstance(chat_log.content[-1], conversation.AssistantContent):
            LOGGER.error(
                "Last content in chat log is not an AssistantContent: %s. This could be due to the model not returning a valid response",
                chat_log.content[-1],
            )
            raise HomeAssistantError(ERROR_GETTING_RESPONSE)

        text = chat_log.content[-1].content or ""

        if not task.structure:
            return ai_task.GenDataTaskResult(
                conversation_id=chat_log.conversation_id,
                data=text,
            )

        try:
            data = json_loads(text)
        except JSONDecodeError as err:
            LOGGER.error(
                "Failed to parse JSON response: %s. Response: %s",
                err,
                text,
            )
            raise HomeAssistantError(ERROR_GETTING_RESPONSE) from err

        return ai_task.GenDataTaskResult(
            conversation_id=chat_log.conversation_id,
            data=data,
        )
homeassistant/components/google_generative_ai_conversation/config_flow.py
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from collections.abc import Mapping
+from functools import partial
 import logging
 from typing import Any, cast

@@ -46,10 +47,12 @@ from .const import (
     CONF_TOP_K,
     CONF_TOP_P,
     CONF_USE_GOOGLE_SEARCH_TOOL,
+    DEFAULT_AI_TASK_NAME,
     DEFAULT_CONVERSATION_NAME,
     DEFAULT_TITLE,
     DEFAULT_TTS_NAME,
     DOMAIN,
+    RECOMMENDED_AI_TASK_OPTIONS,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_CONVERSATION_OPTIONS,
     RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -72,12 +75,14 @@ STEP_API_DATA_SCHEMA = vol.Schema(
 )


-async def validate_input(data: dict[str, Any]) -> None:
+async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
     """Validate the user input allows us to connect.

     Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
     """
-    client = genai.Client(api_key=data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(genai.Client, api_key=data[CONF_API_KEY])
+    )
     await client.aio.models.list(
         config={
             "http_options": {
@@ -92,7 +97,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Google Generative AI Conversation."""

     VERSION = 2
-    MINOR_VERSION = 2
+    MINOR_VERSION = 3

     async def async_step_api(
         self, user_input: dict[str, Any] | None = None
@@ -102,7 +107,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
         if user_input is not None:
             self._async_abort_entries_match(user_input)
             try:
-                await validate_input(user_input)
+                await validate_input(self.hass, user_input)
             except (APIError, Timeout) as err:
                 if isinstance(err, ClientError) and "API_KEY_INVALID" in str(err):
                     errors["base"] = "invalid_auth"
@@ -133,6 +138,12 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
                         "title": DEFAULT_TTS_NAME,
                         "unique_id": None,
                     },
+                    {
+                        "subentry_type": "ai_task_data",
+                        "data": RECOMMENDED_AI_TASK_OPTIONS,
+                        "title": DEFAULT_AI_TASK_NAME,
+                        "unique_id": None,
+                    },
                 ],
             )
         return self.async_show_form(
@@ -181,6 +192,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
         return {
             "conversation": LLMSubentryFlowHandler,
             "tts": LLMSubentryFlowHandler,
+            "ai_task_data": LLMSubentryFlowHandler,
         }


@@ -214,6 +226,8 @@ class LLMSubentryFlowHandler(ConfigSubentryFlow):
             options: dict[str, Any]
             if self._subentry_type == "tts":
                 options = RECOMMENDED_TTS_OPTIONS.copy()
+            elif self._subentry_type == "ai_task_data":
+                options = RECOMMENDED_AI_TASK_OPTIONS.copy()
             else:
                 options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
         else:
@@ -288,6 +302,8 @@ async def google_generative_ai_config_option_schema(
         default_name = options[CONF_NAME]
     elif subentry_type == "tts":
         default_name = DEFAULT_TTS_NAME
+    elif subentry_type == "ai_task_data":
+        default_name = DEFAULT_AI_TASK_NAME
     else:
         default_name = DEFAULT_CONVERSATION_NAME
     schema: dict[vol.Required | vol.Optional, Any] = {
@@ -315,6 +331,7 @@ async def google_generative_ai_config_option_schema(
             ),
         }
     )

     schema.update(
         {
             vol.Required(
@@ -443,4 +460,5 @@ async def google_generative_ai_config_option_schema(
         ): bool,
     }
 )

 return schema
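The functools.partial wrapper in validate_input exists because executor jobs pass positional arguments only, so a keyword-only constructor must be pre-bound before being handed to the thread pool. A sketch with a dummy client factory in place of genai.Client:

import asyncio
from functools import partial

def make_client(*, api_key: str) -> str:
    # Keyword-only constructor, like the real client's signature.
    return f"client({api_key})"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # partial binds the keyword argument so the executor can call it bare.
    client = await loop.run_in_executor(None, partial(make_client, api_key="abc"))
    print(client)  # client(abc)

asyncio.run(main())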
homeassistant/components/google_generative_ai_conversation/const.py
@@ -12,6 +12,7 @@ CONF_PROMPT = "prompt"

 DEFAULT_CONVERSATION_NAME = "Google AI Conversation"
 DEFAULT_TTS_NAME = "Google AI TTS"
+DEFAULT_AI_TASK_NAME = "Google AI Task"

 CONF_RECOMMENDED = "recommended"
 CONF_CHAT_MODEL = "chat_model"
@@ -35,6 +36,7 @@ RECOMMENDED_USE_GOOGLE_SEARCH_TOOL = False

 TIMEOUT_MILLIS = 10000
 FILE_POLLING_INTERVAL_SECONDS = 0.05

 RECOMMENDED_CONVERSATION_OPTIONS = {
     CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
     CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
@@ -44,3 +46,7 @@ RECOMMENDED_CONVERSATION_OPTIONS = {
 RECOMMENDED_TTS_OPTIONS = {
     CONF_RECOMMENDED: True,
 }
+
+RECOMMENDED_AI_TASK_OPTIONS = {
+    CONF_RECOMMENDED: True,
+}
@ -2,15 +2,21 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
from typing import Any, cast
|
||||
|
||||
from google.genai import Client
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
File,
|
||||
FileState,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
@ -21,10 +27,12 @@ from google.genai.types import (
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@ -41,6 +49,7 @@ from .const import (
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
FILE_POLLING_INTERVAL_SECONDS,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
@ -48,6 +57,7 @@ from .const import (
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
TIMEOUT_MILLIS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@ -324,6 +334,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
structure: vol.Schema | None = None,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
@ -402,6 +413,18 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
generateContentConfig.automatic_function_calling = (
|
||||
AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None)
|
||||
)
|
||||
if structure:
|
||||
generateContentConfig.response_mime_type = "application/json"
|
||||
generateContentConfig.response_schema = _format_schema(
|
||||
convert(
|
||||
structure,
|
||||
custom_serializer=(
|
||||
chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
@ -480,3 +503,68 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
async def async_prepare_files_for_prompt(
|
||||
hass: HomeAssistant, client: Client, files: list[Path]
|
||||
) -> list[File]:
|
||||
"""Append files to a prompt.
|
||||
|
||||
Caller needs to ensure that the files are allowed.
|
||||
"""
|
||||
|
||||
def upload_files() -> list[File]:
|
||||
prompt_parts: list[File] = []
|
||||
for filename in files:
|
||||
if not filename.exists():
|
||||
raise HomeAssistantError(f"`{filename}` does not exist")
|
||||
mimetype = mimetypes.guess_type(filename)[0]
|
||||
prompt_parts.append(
|
||||
client.files.upload(
|
||||
file=filename,
|
||||
config={
|
||||
"mime_type": mimetype,
|
||||
"display_name": filename.name,
|
||||
},
|
||||
)
|
||||
)
|
||||
return prompt_parts
|
||||
|
||||
async def wait_for_file_processing(uploaded_file: File) -> None:
|
||||
"""Wait for file processing to complete."""
|
||||
first = True
|
||||
while uploaded_file.state in (
|
||||
FileState.STATE_UNSPECIFIED,
|
||||
FileState.PROCESSING,
|
||||
):
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
LOGGER.debug(
|
||||
"Waiting for file `%s` to be processed, current state: %s",
|
||||
uploaded_file.name,
|
||||
uploaded_file.state,
|
||||
)
|
||||
await asyncio.sleep(FILE_POLLING_INTERVAL_SECONDS)
|
||||
|
||||
uploaded_file = await client.aio.files.get(
|
||||
name=uploaded_file.name,
|
||||
config={"http_options": {"timeout": TIMEOUT_MILLIS}},
|
||||
)
|
||||
|
||||
if uploaded_file.state == FileState.FAILED:
|
||||
raise HomeAssistantError(
|
||||
f"File `{uploaded_file.name}` processing failed, reason: {uploaded_file.error.message}"
|
||||
)
|
||||
|
||||
prompt_parts = await hass.async_add_executor_job(upload_files)
|
||||
|
||||
tasks = [
|
||||
asyncio.create_task(wait_for_file_processing(part))
|
||||
for part in prompt_parts
|
||||
if part.state != FileState.ACTIVE
|
||||
]
|
||||
async with asyncio.timeout(TIMEOUT_MILLIS / 1000):
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
return prompt_parts
|
||||
|
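Editor's note: a minimal usage sketch for the upload helper above, not code from this commit. It assumes a configured google-genai Client, a caller that has already validated file access, and an illustrative model id and file path.

    # Sketch only: the path and model id below are hypothetical.
    from pathlib import Path

    async def describe_file(hass, client) -> str:
        files = [Path("/config/www/snapshot.jpg")]  # hypothetical path
        uploaded = await async_prepare_files_for_prompt(hass, client, files)
        # google-genai accepts File objects alongside text parts in `contents`.
        response = await client.aio.models.generate_content(
            model="gemini-2.0-flash",  # illustrative model id
            contents=["Describe the attached file.", *uploaded],
        )
        return response.text or ""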
@ -88,6 +88,34 @@
"entry_not_loaded": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
},
"ai_task_data": {
"initiate_flow": {
"user": "Add Generate data with AI service",
"reconfigure": "Reconfigure Generate data with AI service"
},
"entry_type": "Generate data with AI service",
"step": {
"set_options": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::recommended%]",
"chat_model": "[%key:common::generic::model%]",
"temperature": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::temperature%]",
"top_p": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_p%]",
"top_k": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_k%]",
"max_tokens": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::max_tokens%]",
"harassment_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::harassment_block_threshold%]",
"hate_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::hate_block_threshold%]",
"sexual_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::sexual_block_threshold%]",
"dangerous_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::dangerous_block_threshold%]"
}
}
},
"abort": {
"entry_not_loaded": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
}
},
"services": {

@ -95,21 +95,16 @@ def get_recurrence_rule(recurrence: rrule) -> str:

'DTSTART:YYYYMMDDTHHMMSS\nRRULE:FREQ=YEARLY;INTERVAL=2'

Parameters
----------
recurrence : rrule
An RRULE object.
Args:
recurrence: An RRULE object.

Returns
-------
str
Returns:
The recurrence rule portion of the RRULE string, starting with 'FREQ='.

Example
-------
>>> rule = get_recurrence_rule(task)
>>> print(rule)
'FREQ=YEARLY;INTERVAL=2'
Example:
>>> rule = get_recurrence_rule(task)
>>> print(rule)
'FREQ=YEARLY;INTERVAL=2'

"""
return str(recurrence).split("RRULE:")[1]

@ -61,8 +61,7 @@
"init": {
"data": {
"traffic_mode": "Traffic mode",
"route_mode": "Route mode",
"unit_system": "Unit system"
"route_mode": "Route mode"
}
},
"time_menu": {

@ -3,11 +3,15 @@
from __future__ import annotations

from collections.abc import Mapping
from datetime import timedelta
from typing import Any, cast

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.schema_config_entry_flow import (
SchemaCommonFlowHandler,
SchemaConfigFlowHandler,
@ -26,6 +30,7 @@ from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
)
from homeassistant.helpers.template import Template

from .const import (
CONF_DURATION,
@ -37,14 +42,21 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
)
from .coordinator import HistoryStatsUpdateCoordinator
from .data import HistoryStats
from .sensor import HistoryStatsSensor


def _validate_two_period_keys(user_input: dict[str, Any]) -> None:
if sum(param in user_input for param in CONF_PERIOD_KEYS) != 2:
raise SchemaFlowError("only_two_keys_allowed")


async def validate_options(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Validate options selected."""
if sum(param in user_input for param in CONF_PERIOD_KEYS) != 2:
raise SchemaFlowError("only_two_keys_allowed")
_validate_two_period_keys(user_input)

handler.parent_handler._async_abort_entries_match({**handler.options, **user_input})  # noqa: SLF001

@ -97,12 +109,14 @@ CONFIG_FLOW = {
"options": SchemaFlowFormStep(
schema=DATA_SCHEMA_OPTIONS,
validate_user_input=validate_options,
preview="history_stats",
),
}
OPTIONS_FLOW = {
"init": SchemaFlowFormStep(
DATA_SCHEMA_OPTIONS,
validate_user_input=validate_options,
preview="history_stats",
),
}

@ -116,3 +130,115 @@ class HistoryStatsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""
return cast(str, options[CONF_NAME])

@staticmethod
async def async_setup_preview(hass: HomeAssistant) -> None:
"""Set up preview WS API."""
websocket_api.async_register_command(hass, ws_start_preview)


@websocket_api.websocket_command(
{
vol.Required("type"): "history_stats/start_preview",
vol.Required("flow_id"): str,
vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
vol.Required("user_input"): dict,
}
)
@websocket_api.async_response
async def ws_start_preview(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Generate a preview."""
if msg["flow_type"] == "config_flow":
flow_status = hass.config_entries.flow.async_get(msg["flow_id"])
flow_sets = hass.config_entries.flow._handler_progress_index.get(  # noqa: SLF001
flow_status["handler"]
)
options = {}
assert flow_sets
for active_flow in flow_sets:
options = active_flow._common_handler.options  # type: ignore [attr-defined] # noqa: SLF001
config_entry = hass.config_entries.async_get_entry(flow_status["handler"])
entity_id = options[CONF_ENTITY_ID]
name = options[CONF_NAME]
else:
flow_status = hass.config_entries.options.async_get(msg["flow_id"])
config_entry = hass.config_entries.async_get_entry(flow_status["handler"])
if not config_entry:
raise HomeAssistantError("Config entry not found")
entity_id = config_entry.options[CONF_ENTITY_ID]
name = config_entry.options[CONF_NAME]

@callback
def async_preview_updated(
last_exception: Exception | None, state: str, attributes: Mapping[str, Any]
) -> None:
"""Forward config entry state events to websocket."""
if last_exception:
connection.send_message(
websocket_api.event_message(
msg["id"], {"error": str(last_exception) or "Unknown error"}
)
)
else:
connection.send_message(
websocket_api.event_message(
msg["id"], {"attributes": attributes, "state": state}
)
)

for param in CONF_PERIOD_KEYS:
if param in msg["user_input"] and not bool(msg["user_input"][param]):
del msg["user_input"][param]  # Remove falsy values before counting keys

validated_data: Any = None
try:
validated_data = DATA_SCHEMA_OPTIONS(msg["user_input"])
except vol.Invalid as ex:
connection.send_error(msg["id"], "invalid_schema", str(ex))
return

try:
_validate_two_period_keys(validated_data)
except SchemaFlowError:
connection.send_error(
msg["id"],
"invalid_schema",
f"Exactly two of {', '.join(CONF_PERIOD_KEYS)} required",
)
return

sensor_type = validated_data.get(CONF_TYPE)
entity_states = validated_data.get(CONF_STATE)
start = validated_data.get(CONF_START)
end = validated_data.get(CONF_END)
duration = validated_data.get(CONF_DURATION)

history_stats = HistoryStats(
hass,
entity_id,
entity_states,
Template(start, hass) if start else None,
Template(end, hass) if end else None,
timedelta(**duration) if duration else None,
True,
)
coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name, True)
await coordinator.async_refresh()
preview_entity = HistoryStatsSensor(
hass, coordinator, sensor_type, name, None, entity_id
)
preview_entity.hass = hass

connection.send_result(msg["id"])
cancel_listener = coordinator.async_setup_state_listener()
cancel_preview = await preview_entity.async_start_preview(async_preview_updated)

def unsub() -> None:
cancel_listener()
cancel_preview()

connection.subscriptions[msg["id"]] = unsub
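Editor's note: a quick illustration of the period rule enforced twice above, assuming CONF_PERIOD_KEYS covers the start, end and duration options; exactly two of the three must be present so the third can be derived.

    # Hypothetical stand-in for the real constant from .const.
    CONF_PERIOD_KEYS = ("start", "end", "duration")

    user_input = {"start": "{{ today_at() }}", "duration": {"hours": 2}}
    assert sum(param in user_input for param in CONF_PERIOD_KEYS) == 2  # accepted

    user_input = {"start": "{{ today_at() }}"}  # one key only
    assert sum(param in user_input for param in CONF_PERIOD_KEYS) != 2  # rejected: only_two_keys_allowed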
@ -36,12 +36,14 @@ class HistoryStatsUpdateCoordinator(DataUpdateCoordinator[HistoryStatsState]):
history_stats: HistoryStats,
config_entry: ConfigEntry | None,
name: str,
preview: bool = False,
) -> None:
"""Initialize DataUpdateCoordinator."""
self._history_stats = history_stats
self._subscriber_count = 0
self._at_start_listener: CALLBACK_TYPE | None = None
self._track_events_listener: CALLBACK_TYPE | None = None
self._preview = preview
super().__init__(
hass,
_LOGGER,
@ -104,3 +106,8 @@ class HistoryStatsUpdateCoordinator(DataUpdateCoordinator[HistoryStatsState]):
return await self._history_stats.async_update(None)
except (TemplateError, TypeError, ValueError) as ex:
raise UpdateFailed(ex) from ex

async def async_refresh(self) -> None:
"""Refresh data and log errors."""
log_failures = not self._preview
await self._async_refresh(log_failures)

@ -47,6 +47,7 @@ class HistoryStats:
start: Template | None,
end: Template | None,
duration: datetime.timedelta | None,
preview: bool = False,
) -> None:
"""Init the history stats manager."""
self.hass = hass
@ -59,6 +60,7 @@ class HistoryStats:
self._duration = duration
self._start = start
self._end = end
self._preview = preview

self._pending_events: list[Event[EventStateChangedData]] = []
self._query_count = 0
@ -70,7 +72,9 @@ class HistoryStats:
# Get previous values of start and end
previous_period_start, previous_period_end = self._period
# Parse templates
self._period = async_calculate_period(self._duration, self._start, self._end)
self._period = async_calculate_period(
self._duration, self._start, self._end, log_errors=not self._preview
)
# Get the current period
current_period_start, current_period_end = self._period

@ -23,6 +23,7 @@ def async_calculate_period(
duration: datetime.timedelta | None,
start_template: Template | None,
end_template: Template | None,
log_errors: bool = True,
) -> tuple[datetime.datetime, datetime.datetime]:
"""Parse the templates and return the period."""
bounds: dict[str, datetime.datetime | None] = {
@ -37,13 +38,17 @@
if template is None:
continue
try:
rendered = template.async_render()
rendered = template.async_render(
log_fn=None if log_errors else lambda *args, **kwargs: None
)
except (TemplateError, TypeError) as ex:
if ex.args and not ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"
if (
log_errors
and ex.args
and not ex.args[0].startswith("UndefinedError: 'None' has no attribute")
):
_LOGGER.error("Error parsing template for field %s", bound, exc_info=ex)
raise
raise type(ex)(f"Error parsing template for field {bound}: {ex}") from ex
if isinstance(rendered, str):
bounds[bound] = dt_util.parse_datetime(rendered)
if bounds[bound] is not None:
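Editor's note: a small sketch of the silencing pattern introduced above, assuming Template.async_render forwards log_fn to the template error reporting; a no-op callable mutes log output during previews while the exception itself still propagates to the caller.

    # Sketch: render a template, muting error logs unless log_errors is set.
    def render_quietly(template, log_errors: bool):
        return template.async_render(
            log_fn=None if log_errors else lambda *args, **kwargs: None
        )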
@ -3,6 +3,7 @@
from __future__ import annotations

from abc import abstractmethod
from collections.abc import Callable, Mapping
import datetime
from typing import Any

@ -23,7 +24,7 @@ from homeassistant.const import (
PERCENTAGE,
UnitOfTime,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device import async_device_info_to_link_from_entity
@ -183,6 +184,9 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
) -> None:
"""Initialize the HistoryStats sensor."""
super().__init__(coordinator, name)
self._preview_callback: (
Callable[[Exception | None, str, Mapping[str, Any]], None] | None
) = None
self._attr_native_unit_of_measurement = UNITS[sensor_type]
self._type = sensor_type
self._attr_unique_id = unique_id
@ -212,3 +216,29 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
self._attr_native_value = pretty_ratio(state.seconds_matched, state.period)
elif self._type == CONF_TYPE_COUNT:
self._attr_native_value = state.match_count

if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(
None, calculated_state.state, calculated_state.attributes
)

async def async_start_preview(
self,
preview_callback: Callable[[Exception | None, str, Mapping[str, Any]], None],
) -> CALLBACK_TYPE:
"""Render a preview."""

self.async_on_remove(
self.coordinator.async_add_listener(self._process_update, None)
)

self._preview_callback = preview_callback
calculated_state = self._async_calculate_state()
preview_callback(
self.coordinator.last_exception,
calculated_state.state,
calculated_state.attributes,
)

return self._call_on_remove_callbacks

@ -42,16 +42,16 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.entity_component import async_update_entity
from homeassistant.helpers.issue_registry import IssueSeverity
from homeassistant.helpers.selector import (
TargetSelectorData,
async_extract_referenced_entity_ids,
)
from homeassistant.helpers.service import (
async_extract_config_entry_ids,
async_register_admin_service,
)
from homeassistant.helpers.signal import KEY_HA_STOP
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.target import (
TargetSelectorData,
async_extract_referenced_entity_ids,
)
from homeassistant.helpers.template import async_load_custom_templates
from homeassistant.helpers.typing import ConfigType

@ -16,6 +16,7 @@ from homeassistant.const import (
CONF_PLATFORM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
WEEKDAYS,
)
from homeassistant.core import (
CALLBACK_TYPE,
@ -37,6 +38,8 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

CONF_WEEKDAY = "weekday"

_TIME_TRIGGER_ENTITY = vol.All(str, cv.entity_domain(["input_datetime", "sensor"]))
_TIME_AT_SCHEMA = vol.Any(cv.time, _TIME_TRIGGER_ENTITY)

@ -74,6 +77,10 @@ TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_PLATFORM): "time",
vol.Required(CONF_AT): vol.All(cv.ensure_list, [_TIME_TRIGGER_SCHEMA]),
vol.Optional(CONF_WEEKDAY): vol.Any(
vol.In(WEEKDAYS),
vol.All(cv.ensure_list, [vol.In(WEEKDAYS)]),
),
}
)

@ -85,7 +92,7 @@ class TrackEntity(NamedTuple):
callback: Callable


async def async_attach_trigger(
async def async_attach_trigger(  # noqa: C901
hass: HomeAssistant,
config: ConfigType,
action: TriggerActionType,
@ -103,6 +110,18 @@ async def async_attach_trigger(
description: str, now: datetime, *, entity_id: str | None = None
) -> None:
"""Listen for time changes and calls action."""
# Check weekday filter if configured
if CONF_WEEKDAY in config:
weekday_config = config[CONF_WEEKDAY]
current_weekday = WEEKDAYS[now.weekday()]

# Check if current weekday matches the configuration
if isinstance(weekday_config, str):
if current_weekday != weekday_config:
return
elif current_weekday not in weekday_config:
return

hass.async_run_hass_job(
job,
{
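Editor's note: the weekday filter above relies on Python's datetime.weekday() (Monday is 0) lining up with Home Assistant's WEEKDAYS constant; a worked example with the constant inlined:

    from datetime import datetime

    WEEKDAYS = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]  # as in homeassistant.const

    now = datetime(2025, 7, 14)  # a Monday
    assert now.weekday() == 0
    assert WEEKDAYS[now.weekday()] == "mon"  # matches a trigger configured with weekday "mon"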
@ -7,7 +7,7 @@ from pyHomee import Homee, HomeeAuthFailedException, HomeeConnectionFailedExcept
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import DOMAIN
@ -53,12 +53,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
try:
await homee.get_access_token()
except HomeeConnectionFailedException as exc:
raise ConfigEntryNotReady(
f"Connection to Homee failed: {exc.__cause__}"
) from exc
raise ConfigEntryNotReady(f"Connection to Homee failed: {exc.reason}") from exc
except HomeeAuthFailedException as exc:
raise ConfigEntryNotReady(
f"Authentication to Homee failed: {exc.__cause__}"
raise ConfigEntryAuthFailed(
f"Authentication to Homee failed: {exc.reason}"
) from exc

hass.loop.create_task(homee.run())

@ -1,5 +1,6 @@
"""Config flow for homee integration."""

from collections.abc import Mapping
import logging
from typing import Any

@ -32,6 +33,8 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1

homee: Homee
_reauth_host: str
_reauth_username: str

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@ -84,6 +87,63 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)

async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self._reauth_host = entry_data[CONF_HOST]
self._reauth_username = entry_data[CONF_USERNAME]
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication dialog."""
errors: dict[str, str] = {}

if user_input:
self.homee = Homee(
self._reauth_host, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
)
try:
await self.homee.get_access_token()
except HomeeConnectionFailedException:
errors["base"] = "cannot_connect"
except HomeeAuthenticationFailedException:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
self.hass.loop.create_task(self.homee.run())
await self.homee.wait_until_connected()
self.homee.disconnect()
await self.homee.wait_until_disconnected()

await self.async_set_unique_id(self.homee.settings.uid)
self._abort_if_unique_id_mismatch(reason="wrong_hub")

_LOGGER.debug(
"Reauthenticated homee entry with ID %s", self.homee.settings.uid
)
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)

return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_USERNAME, default=self._reauth_username): str,
vol.Required(CONF_PASSWORD): str,
}
),
description_placeholders={
"host": self._reauth_host,
},
errors=errors,
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:

@ -3,8 +3,9 @@
"flow_title": "homee {name} ({host})",
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_hub": "Address belongs to a different homee."
"wrong_hub": "IP address belongs to a different homee than the configured one."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@ -25,6 +26,17 @@
"password": "The password for your homee."
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"username": "[%key:component::homee::config::step::user::data_description::username%]",
"password": "[%key:component::homee::config::step::user::data_description::password%]"
}
},
"reconfigure": {
"title": "Reconfigure homee {name}",
"description": "Reconfigure the IP address of your homee.",
@ -32,7 +44,7 @@
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "The IP address of your homee."
"host": "[%key:component::homee::config::step::user::data_description::host%]"
}
}
}

@ -75,12 +75,12 @@ from homeassistant.helpers.entityfilter import (
EntityFilter,
)
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.selector import (
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.target import (
TargetSelectorData,
async_extract_referenced_entity_ids,
)
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import IntegrationNotFound, async_get_integration
from homeassistant.util.async_ import create_eager_task

@ -64,7 +64,7 @@ def setup_bans(hass: HomeAssistant, app: Application, login_threshold: int) -> N
"""Initialize bans when app starts up."""
await app[KEY_BAN_MANAGER].async_load()

app.on_startup.append(ban_startup)
app.on_startup.append(ban_startup)  # type: ignore[arg-type]


@middleware

@ -74,7 +74,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
"""Subscribe for websocket and poll data from the API."""
if not self.ws_connected:
await self.api.connect()
self.api.register_data_callback(self.callback)
self.api.register_data_callback(self.handle_websocket_updates)
self.ws_connected = True
try:
data = await self.api.get_status()
@ -86,11 +86,27 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
return data

@callback
def callback(self, ws_data: MowerDictionary) -> None:
def handle_websocket_updates(self, ws_data: MowerDictionary) -> None:
"""Process websocket callbacks and write them to the DataUpdateCoordinator."""
self.async_set_updated_data(ws_data)
self._async_add_remove_devices_and_entities(ws_data)

@callback
def async_set_updated_data(self, data: MowerDictionary) -> None:
"""Override DataUpdateCoordinator to preserve fixed polling interval.

The built-in implementation resets the polling timer on every websocket
update. Since websockets do not deliver all required data (e.g. statistics
or work area details), we enforce a constant REST polling cadence.
"""
self.data = data
self.last_update_success = True
self.logger.debug(
"Manually updated %s data",
self.name,
)
self.async_update_listeners()

async def client_listen(
self,
hass: HomeAssistant,
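Editor's note: for contrast, a paraphrased sketch of the inherited behaviour the override above avoids, as we understand the stock DataUpdateCoordinator; this is an assumption about the base class, not a verbatim copy.

    # Paraphrased sketch of the base class (assumption, not verbatim):
    # def async_set_updated_data(self, data):
    #     self._async_unsub_refresh()   # cancels the pending poll timer
    #     self.data = data
    #     self.last_update_success = True
    #     self._schedule_refresh()      # restarts the interval from "now"
    #     self.async_update_listeners()
    # The override keeps everything except the timer calls, so the REST poll
    # keeps firing on its fixed cadence regardless of websocket traffic.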
@ -3,9 +3,6 @@
"binary_sensor": {
"leaving_dock": {
"default": "mdi:debug-step-out"
},
"returning_to_dock": {
"default": "mdi:debug-step-into"
}
},
"button": {
@ -48,6 +45,26 @@
"work_area_progress": {
"default": "mdi:collage"
}
},
"switch": {
"my_lawn_work_area": {
"default": "mdi:square-outline",
"state": {
"on": "mdi:square"
}
},
"work_area_work_area": {
"default": "mdi:square-outline",
"state": {
"on": "mdi:square"
}
},
"stay_out_zones": {
"default": "mdi:rhombus-outline",
"state": {
"on": "mdi:rhombus"
}
}
}
},
"services": {

@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==1.0.1"]
"requirements": ["aioautomower==1.2.0"]
}

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/hydrawise",
"iot_class": "cloud_polling",
"loggers": ["pydrawise"],
"requirements": ["pydrawise==2025.6.0"]
"requirements": ["pydrawise==2025.7.0"]
}

@ -2,8 +2,8 @@
"config": {
"step": {
"user": {
"title": "Configure Iskra Device",
"description": "Enter the IP address of your Iskra Device and select protocol.",
"title": "Configure Iskra device",
"description": "Enter the IP address of your Iskra device and select protocol.",
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
@ -12,7 +12,7 @@
}
},
"authentication": {
"title": "Configure Rest API Credentials",
"title": "Configure REST API credentials",
"description": "Enter username and password",
"data": {
"username": "[%key:common::config_flow::data::username%]",
@ -44,7 +44,7 @@
"selector": {
"protocol": {
"options": {
"rest_api": "Rest API",
"rest_api": "REST API",
"modbus_tcp": "Modbus TCP"
}
}

@ -8,7 +8,12 @@ from urllib.parse import urlparse
from ndms2_client import Client, ConnectionException, InterfaceInfo, TelnetConnection
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@ -45,7 +50,7 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

host: str | bytes | None = None
_host: str | bytes | None = None

@staticmethod
@callback
@ -61,8 +66,9 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
host = self.host or user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: host})
host = self._host or user_input[CONF_HOST]
if self.source != SOURCE_RECONFIGURE:
self._async_abort_entries_match({CONF_HOST: host})

_client = Client(
TelnetConnection(
@ -81,12 +87,17 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN):
except ConnectionException:
errors["base"] = "cannot_connect"
else:
if self.source == SOURCE_RECONFIGURE:
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data={CONF_HOST: host, **user_input},
)
return self.async_create_entry(
title=router_info.name, data={CONF_HOST: host, **user_input}
)

host_schema: VolDictType = (
{vol.Required(CONF_HOST): str} if not self.host else {}
{vol.Required(CONF_HOST): str} if not self._host else {}
)

return self.async_show_form(
@ -102,6 +113,15 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN):
errors=errors,
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration."""
existing_entry_data = dict(self._get_reconfigure_entry().data)
self._host = existing_entry_data[CONF_HOST]

return await self.async_step_user(user_input)

async def async_step_ssdp(
self, discovery_info: SsdpServiceInfo
) -> ConfigFlowResult:
@ -124,7 +144,7 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN):

self._async_abort_entries_match({CONF_HOST: host})

self.host = host
self._host = host
self.context["title_placeholders"] = {
"name": friendly_name,
"host": host,

@ -21,7 +21,8 @@
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"no_udn": "SSDP discovery info has no UDN",
"not_keenetic_ndms2": "Discovered device is not a Keenetic router"
"not_keenetic_ndms2": "Discovered device is not a Keenetic router",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
},
"options": {

@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.0.9"]
"requirements": ["pylamarzocco==2.0.10"]
}

@ -56,6 +56,13 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
CoffeeBoiler, config[WidgetType.CM_COFFEE_BOILER]
).ready_start_time
),
available_fn=(
lambda coordinator: cast(
CoffeeBoiler,
coordinator.device.dashboard.config[WidgetType.CM_COFFEE_BOILER],
).ready_start_time
is not None
),
entity_category=EntityCategory.DIAGNOSTIC,
),
LaMarzoccoSensorEntityDescription(
@ -67,11 +74,18 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
SteamBoilerLevel, config[WidgetType.CM_STEAM_BOILER_LEVEL]
).ready_start_time
),
entity_category=EntityCategory.DIAGNOSTIC,
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MICRA, ModelName.LINEA_MINI_R)
),
available_fn=(
lambda coordinator: cast(
SteamBoilerLevel,
coordinator.device.dashboard.config[WidgetType.CM_STEAM_BOILER_LEVEL],
).ready_start_time
is not None
),
entity_category=EntityCategory.DIAGNOSTIC,
),
LaMarzoccoSensorEntityDescription(
key="brewing_start_time",

@ -5,23 +5,16 @@ from functools import partial

import pypck

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.binary_sensor import (
DOMAIN as DOMAIN_BINARY_SENSOR,
BinarySensorEntity,
)
from homeassistant.components.script import scripts_with_entity
from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.typing import ConfigType

from .const import BINSENSOR_PORTS, CONF_DOMAIN_DATA, DOMAIN, SETPOINTS
from .const import CONF_DOMAIN_DATA
from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

@ -34,15 +27,9 @@ def add_lcn_entities(
entity_configs: Iterable[ConfigType],
) -> None:
"""Add entities for this domain."""
entities: list[LcnRegulatorLockSensor | LcnBinarySensor | LcnLockKeysSensor] = []
for entity_config in entity_configs:
if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS:
entities.append(LcnRegulatorLockSensor(entity_config, config_entry))
elif entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS:
entities.append(LcnBinarySensor(entity_config, config_entry))
else:  # in KEY
entities.append(LcnLockKeysSensor(entity_config, config_entry))

entities = [
LcnBinarySensor(entity_config, config_entry) for entity_config in entity_configs
]
async_add_entities(entities)


@ -71,65 +58,6 @@ async def async_setup_entry(
)


class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity):
"""Representation of a LCN binary sensor for regulator locks."""

def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
"""Initialize the LCN binary sensor."""
super().__init__(config, config_entry)

self.setpoint_variable = pypck.lcn_defs.Var[
config[CONF_DOMAIN_DATA][CONF_SOURCE]
]

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()

if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(
self.setpoint_variable
)

entity_automations = automations_with_entity(self.hass, self.entity_id)
entity_scripts = scripts_with_entity(self.hass, self.entity_id)
if entity_automations + entity_scripts:
async_create_issue(
self.hass,
DOMAIN,
f"deprecated_binary_sensor_{self.entity_id}",
breaks_in_ha_version="2025.5.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_regulatorlock_sensor",
translation_placeholders={
"entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}",
},
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(
self.setpoint_variable
)
async_delete_issue(
self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}"
)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
if (
not isinstance(input_obj, pypck.inputs.ModStatusVar)
or input_obj.get_var() != self.setpoint_variable
):
return

self._attr_is_on = input_obj.get_value().is_locked_regulator()
self.async_write_ha_state()


class LcnBinarySensor(LcnEntity, BinarySensorEntity):
"""Representation of a LCN binary sensor for binary sensor ports."""

@ -164,59 +92,3 @@ class LcnBinarySensor(LcnEntity, BinarySensorEntity):

self._attr_is_on = input_obj.get_state(self.bin_sensor_port.value)
self.async_write_ha_state()


class LcnLockKeysSensor(LcnEntity, BinarySensorEntity):
"""Representation of a LCN sensor for key locks."""

def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
"""Initialize the LCN sensor."""
super().__init__(config, config_entry)

self.source = pypck.lcn_defs.Key[config[CONF_DOMAIN_DATA][CONF_SOURCE]]

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()

if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.source)

entity_automations = automations_with_entity(self.hass, self.entity_id)
entity_scripts = scripts_with_entity(self.hass, self.entity_id)
if entity_automations + entity_scripts:
async_create_issue(
self.hass,
DOMAIN,
f"deprecated_binary_sensor_{self.entity_id}",
breaks_in_ha_version="2025.5.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_keylock_sensor",
translation_placeholders={
"entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}",
},
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.source)
async_delete_issue(
self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}"
)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
if (
not isinstance(input_obj, pypck.inputs.ModStatusKeyLocks)
or self.source not in pypck.lcn_defs.Key
):
return

table_id = ord(self.source.name[0]) - 65
key_id = int(self.source.name[1]) - 1

self._attr_is_on = input_obj.get_state(table_id, key_id)
self.async_write_ha_state()

@ -18,6 +18,7 @@ from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.color import brightness_to_value, value_to_brightness

from .const import (
CONF_DIMMABLE,
@ -29,6 +30,8 @@ from .const import (
from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

BRIGHTNESS_SCALE = (1, 100)

PARALLEL_UPDATES = 0


@ -91,8 +94,6 @@ class LcnOutputLight(LcnEntity, LightEntity):
)
self.dimmable = config[CONF_DOMAIN_DATA][CONF_DIMMABLE]

self._is_dimming_to_zero = False

if self.dimmable:
self._attr_color_mode = ColorMode.BRIGHTNESS
else:
@ -113,10 +114,6 @@ class LcnOutputLight(LcnEntity, LightEntity):

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
if ATTR_BRIGHTNESS in kwargs:
percent = int(kwargs[ATTR_BRIGHTNESS] / 255.0 * 100)
else:
percent = 100
if ATTR_TRANSITION in kwargs:
transition = pypck.lcn_defs.time_to_ramp_value(
kwargs[ATTR_TRANSITION] * 1000
@ -124,12 +121,23 @@ class LcnOutputLight(LcnEntity, LightEntity):
else:
transition = self._transition

if not await self.device_connection.dim_output(
self.output.value, percent, transition
):
if ATTR_BRIGHTNESS in kwargs:
percent = int(
brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])
)
if not await self.device_connection.dim_output(
self.output.value, percent, transition
):
return
elif not self.is_on:
if not await self.device_connection.toggle_output(
self.output.value, transition, to_memory=True
):
return
else:
return

self._attr_is_on = True
self._is_dimming_to_zero = False
self.async_write_ha_state()

async def async_turn_off(self, **kwargs: Any) -> None:
@ -141,13 +149,13 @@ class LcnOutputLight(LcnEntity, LightEntity):
else:
transition = self._transition

if not await self.device_connection.dim_output(
self.output.value, 0, transition
):
return
self._is_dimming_to_zero = bool(transition)
self._attr_is_on = False
self.async_write_ha_state()
if self.is_on:
if not await self.device_connection.toggle_output(
self.output.value, transition, to_memory=True
):
return
self._attr_is_on = False
self.async_write_ha_state()

def input_received(self, input_obj: InputType) -> None:
"""Set light state when LCN input object (command) is received."""
@ -157,11 +165,9 @@ class LcnOutputLight(LcnEntity, LightEntity):
):
return

self._attr_brightness = int(input_obj.get_percent() / 100.0 * 255)
if self._attr_brightness == 0:
self._is_dimming_to_zero = False
if not self._is_dimming_to_zero and self._attr_brightness is not None:
self._attr_is_on = self._attr_brightness > 0
percent = input_obj.get_percent()
self._attr_brightness = value_to_brightness(BRIGHTNESS_SCALE, percent)
self._attr_is_on = bool(percent)
self.async_write_ha_state()
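Editor's note: the move to BRIGHTNESS_SCALE = (1, 100) delegates the percent conversion to Home Assistant's shared scaling helpers; a small round-trip sketch (values are approximate because of integer rounding):

    from homeassistant.util.color import brightness_to_value, value_to_brightness

    BRIGHTNESS_SCALE = (1, 100)  # LCN outputs report percent

    percent = 50
    brightness = value_to_brightness(BRIGHTNESS_SCALE, percent)  # roughly 128 of 255
    back = brightness_to_value(BRIGHTNESS_SCALE, brightness)     # roughly 50 again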
@ -9,5 +9,5 @@
"iot_class": "local_push",
"loggers": ["pypck"],
"quality_scale": "bronze",
"requirements": ["pypck==0.8.10", "lcn-frontend==0.2.5"]
"requirements": ["pypck==0.8.10", "lcn-frontend==0.2.6"]
}

@ -28,7 +28,7 @@ from homeassistant.components.light import (
from homeassistant.const import ATTR_MODE
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
from homeassistant.helpers.target import (
TargetSelectorData,
async_extract_referenced_entity_ids,
)

@ -136,7 +136,7 @@ class LookinMedia(LookinPowerPushRemoteEntity, MediaPlayerEntity):
async def async_turn_off(self) -> None:
"""Turn the media player off."""
await self._async_send_command(self._power_off_command)
self._attr_state = MediaPlayerState.STANDBY
self._attr_state = MediaPlayerState.OFF
self.async_write_ha_state()

async def async_turn_on(self) -> None:
@ -159,7 +159,5 @@ class LookinMedia(LookinPowerPushRemoteEntity, MediaPlayerEntity):
state = status[0]
mute = status[2]

self._attr_state = (
MediaPlayerState.ON if state == "1" else MediaPlayerState.STANDBY
)
self._attr_state = MediaPlayerState.ON if state == "1" else MediaPlayerState.OFF
self._attr_is_volume_muted = mute == "0"

@ -54,7 +54,7 @@ class MatterBinarySensor(MatterEntity, BinarySensorEntity):
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value in (None, NullValue):
value = None
elif value_convert := self.entity_description.measurement_to_ha:
elif value_convert := self.entity_description.device_to_ha:
value = value_convert(value)
if TYPE_CHECKING:
value = cast(bool | None, value)
@ -70,7 +70,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="HueMotionSensor",
device_class=BinarySensorDeviceClass.MOTION,
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
device_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
@ -83,7 +83,7 @@ DISCOVERY_SCHEMAS = [
key="OccupancySensor",
device_class=BinarySensorDeviceClass.OCCUPANCY,
# The first bit = if occupied
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
device_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
@ -94,7 +94,7 @@ DISCOVERY_SCHEMAS = [
key="BatteryChargeLevel",
device_class=BinarySensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
measurement_to_ha=lambda x: x
device_to_ha=lambda x: x
!= clusters.PowerSource.Enums.BatChargeLevelEnum.kOk,
),
entity_class=MatterBinarySensor,
@ -109,7 +109,7 @@ DISCOVERY_SCHEMAS = [
key="ContactSensor",
device_class=BinarySensorDeviceClass.DOOR,
# value is inverted on matter to what we expect
measurement_to_ha=lambda x: not x,
device_to_ha=lambda x: not x,
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.BooleanState.Attributes.StateValue,),
@ -153,7 +153,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="LockDoorStateSensor",
device_class=BinarySensorDeviceClass.DOOR,
measurement_to_ha={
device_to_ha={
clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen: True,
clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed: True,
clusters.DoorLock.Enums.DoorStateEnum.kDoorForcedOpen: True,
@ -168,7 +168,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BINARY_SENSOR,
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmDeviceMutedSensor",
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x == clusters.SmokeCoAlarm.Enums.MuteStateEnum.kMuted
),
translation_key="muted",
@ -181,7 +181,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BINARY_SENSOR,
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmEndfOfServiceSensor",
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x == clusters.SmokeCoAlarm.Enums.EndOfServiceEnum.kExpired
),
translation_key="end_of_service",
@ -195,7 +195,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BINARY_SENSOR,
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmBatteryAlertSensor",
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
),
translation_key="battery_alert",
@ -232,7 +232,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmSmokeStateSensor",
device_class=BinarySensorDeviceClass.SMOKE,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
),
),
@ -244,7 +244,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmInterconnectSmokeAlarmSensor",
device_class=BinarySensorDeviceClass.SMOKE,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
),
translation_key="interconnected_smoke_alarm",
@ -257,7 +257,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="SmokeCoAlarmInterconnectCOAlarmSensor",
device_class=BinarySensorDeviceClass.CO,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
),
translation_key="interconnected_co_alarm",
@ -271,7 +271,7 @@ DISCOVERY_SCHEMAS = [
key="EnergyEvseChargingStatusSensor",
translation_key="evse_charging_status",
device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
measurement_to_ha={
device_to_ha={
clusters.EnergyEvse.Enums.StateEnum.kNotPluggedIn: False,
clusters.EnergyEvse.Enums.StateEnum.kPluggedInNoDemand: False,
clusters.EnergyEvse.Enums.StateEnum.kPluggedInDemand: False,
@ -291,7 +291,7 @@ DISCOVERY_SCHEMAS = [
key="EnergyEvsePlugStateSensor",
translation_key="evse_plug_state",
device_class=BinarySensorDeviceClass.PLUG,
measurement_to_ha={
device_to_ha={
clusters.EnergyEvse.Enums.StateEnum.kNotPluggedIn: False,
clusters.EnergyEvse.Enums.StateEnum.kPluggedInNoDemand: True,
clusters.EnergyEvse.Enums.StateEnum.kPluggedInDemand: True,
@ -309,9 +309,9 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BINARY_SENSOR,
entity_description=MatterBinarySensorEntityDescription(
key="EnergyEvseSupplyStateSensor",
translation_key="evse_supply_charging_state",
translation_key="evse_supply_state",
device_class=BinarySensorDeviceClass.RUNNING,
measurement_to_ha={
device_to_ha={
clusters.EnergyEvse.Enums.SupplyStateEnum.kDisabled: False,
clusters.EnergyEvse.Enums.SupplyStateEnum.kChargingEnabled: True,
clusters.EnergyEvse.Enums.SupplyStateEnum.kDischargingEnabled: False,
@ -327,7 +327,7 @@ DISCOVERY_SCHEMAS = [
entity_description=MatterBinarySensorEntityDescription(
key="WaterHeaterManagementBoostStateSensor",
translation_key="boost_state",
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x == clusters.WaterHeaterManagement.Enums.BoostStateEnum.kActive
),
),
@ -342,7 +342,7 @@ DISCOVERY_SCHEMAS = [
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# DeviceFault or SupplyFault bit enabled
measurement_to_ha={
device_to_ha={
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
@ -366,7 +366,7 @@ DISCOVERY_SCHEMAS = [
key="PumpStatusRunning",
translation_key="pump_running",
device_class=BinarySensorDeviceClass.RUNNING,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x
== clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
),
@ -384,7 +384,7 @@ DISCOVERY_SCHEMAS = [
translation_key="dishwasher_alarm_inflow",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
),
),
@ -399,7 +399,7 @@ DISCOVERY_SCHEMAS = [
translation_key="dishwasher_alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
measurement_to_ha=lambda x: (
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
),
),

@ -59,8 +59,8 @@ class MatterEntityDescription(EntityDescription):
"""Describe the Matter entity."""

# convert the value from the primary attribute to the value used by HA
measurement_to_ha: Callable[[Any], Any] | None = None
ha_to_native_value: Callable[[Any], Any] | None = None
device_to_ha: Callable[[Any], Any] | None = None
ha_to_device: Callable[[Any], Any] | None = None
command_timeout: int | None = None
|
||||
):
|
||||
"""Describe Matter Number Input entities with min and max values."""
|
||||
|
||||
ha_to_native_value: Callable[[Any], Any]
|
||||
ha_to_device: Callable[[Any], Any]
|
||||
|
||||
# attribute descriptors to get the min and max value
|
||||
min_attribute: type[ClusterAttributeDescriptor]
|
||||
@ -74,7 +74,7 @@ class MatterNumber(MatterEntity, NumberEntity):
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Update the current value."""
|
||||
sendvalue = int(value)
|
||||
if value_convert := self.entity_description.ha_to_native_value:
|
||||
if value_convert := self.entity_description.ha_to_device:
|
||||
sendvalue = value_convert(value)
|
||||
await self.write_attribute(
|
||||
value=sendvalue,
|
||||
@ -84,7 +84,7 @@ class MatterNumber(MatterEntity, NumberEntity):
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value_convert := self.entity_description.measurement_to_ha:
|
||||
if value_convert := self.entity_description.device_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_native_value = value
|
||||
|
||||
@ -96,7 +96,7 @@ class MatterRangeNumber(MatterEntity, NumberEntity):
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Update the current value."""
|
||||
send_value = self.entity_description.ha_to_native_value(value)
|
||||
send_value = self.entity_description.ha_to_device(value)
|
||||
# custom command defined to set the new value
|
||||
await self.send_device_command(
|
||||
self.entity_description.command(send_value),
|
||||
@ -106,7 +106,7 @@ class MatterRangeNumber(MatterEntity, NumberEntity):
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value_convert := self.entity_description.measurement_to_ha:
|
||||
if value_convert := self.entity_description.device_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_native_value = value
|
||||
self._attr_native_min_value = (
|
||||
@ -133,7 +133,7 @@ class MatterLevelControlNumber(MatterEntity, NumberEntity):
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set level value."""
|
||||
send_value = int(value)
|
||||
if value_convert := self.entity_description.ha_to_native_value:
|
||||
if value_convert := self.entity_description.ha_to_device:
|
||||
send_value = value_convert(value)
|
||||
await self.send_device_command(
|
||||
clusters.LevelControl.Commands.MoveToLevel(
|
||||
@ -145,7 +145,7 @@ class MatterLevelControlNumber(MatterEntity, NumberEntity):
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value_convert := self.entity_description.measurement_to_ha:
|
||||
if value_convert := self.entity_description.device_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_native_value = value
|
||||
|
||||
@ -162,8 +162,8 @@ DISCOVERY_SCHEMAS = [
|
||||
native_min_value=0,
|
||||
mode=NumberMode.BOX,
|
||||
# use 255 to indicate that the value should revert to the default
|
||||
measurement_to_ha=lambda x: 255 if x is None else x,
|
||||
ha_to_native_value=lambda x: None if x == 255 else int(x),
|
||||
device_to_ha=lambda x: 255 if x is None else x,
|
||||
ha_to_device=lambda x: None if x == 255 else int(x),
|
||||
native_step=1,
|
||||
native_unit_of_measurement=None,
|
||||
),
|
||||
@ -180,8 +180,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="on_transition_time",
|
||||
native_max_value=65534,
|
||||
native_min_value=0,
|
||||
measurement_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_native_value=lambda x: round(x * 10),
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
mode=NumberMode.BOX,
|
||||
@ -199,8 +199,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="off_transition_time",
|
||||
native_max_value=65534,
|
||||
native_min_value=0,
|
||||
measurement_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_native_value=lambda x: round(x * 10),
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
mode=NumberMode.BOX,
|
||||
@ -218,8 +218,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="on_off_transition_time",
|
||||
native_max_value=65534,
|
||||
native_min_value=0,
|
||||
measurement_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_native_value=lambda x: round(x * 10),
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
mode=NumberMode.BOX,
|
||||
@ -256,8 +256,8 @@ DISCOVERY_SCHEMAS = [
|
||||
native_min_value=-50,
|
||||
native_step=0.5,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
measurement_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_native_value=lambda x: round(x * 10),
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
@ -275,10 +275,10 @@ DISCOVERY_SCHEMAS = [
|
||||
native_max_value=100,
|
||||
native_min_value=0.5,
|
||||
native_step=0.5,
|
||||
measurement_to_ha=(
|
||||
device_to_ha=(
|
||||
lambda x: None if x is None else x / 2 # Matter range (1-200)
|
||||
),
|
||||
ha_to_native_value=lambda x: round(x * 2), # HA range 0.5–100.0%
|
||||
ha_to_device=lambda x: round(x * 2), # HA range 0.5–100.0%
|
||||
mode=NumberMode.SLIDER,
|
||||
),
|
||||
entity_class=MatterLevelControlNumber,
|
||||
@ -326,8 +326,8 @@ DISCOVERY_SCHEMAS = [
|
||||
targetTemperature=value
|
||||
),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
measurement_to_ha=lambda x: None if x is None else x / 100,
|
||||
ha_to_native_value=lambda x: round(x * 100),
|
||||
device_to_ha=lambda x: None if x is None else x / 100,
|
||||
ha_to_device=lambda x: round(x * 100),
|
||||
min_attribute=clusters.TemperatureControl.Attributes.MinTemperature,
|
||||
max_attribute=clusters.TemperatureControl.Attributes.MaxTemperature,
|
||||
mode=NumberMode.SLIDER,
|
||||
|
@ -71,8 +71,8 @@ class MatterSelectEntityDescription(SelectEntityDescription, MatterEntityDescrip
class MatterMapSelectEntityDescription(MatterSelectEntityDescription):
"""Describe Matter select entities for MatterMapSelectEntityDescription."""

measurement_to_ha: Callable[[int], str | None]
ha_to_native_value: Callable[[str], int | None]
device_to_ha: Callable[[int], str | None]
ha_to_device: Callable[[str], int | None]

# list attribute: the attribute descriptor to get the list of values (= list of integers)
list_attribute: type[ClusterAttributeDescriptor]
@ -97,7 +97,7 @@ class MatterAttributeSelectEntity(MatterEntity, SelectEntity):

async def async_select_option(self, option: str) -> None:
"""Change the selected mode."""
value_convert = self.entity_description.ha_to_native_value
value_convert = self.entity_description.ha_to_device
if TYPE_CHECKING:
assert value_convert is not None
await self.write_attribute(
@ -109,7 +109,7 @@ class MatterAttributeSelectEntity(MatterEntity, SelectEntity):
"""Update from device."""
value: Nullable | int | None
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
value_convert = self.entity_description.measurement_to_ha
value_convert = self.entity_description.device_to_ha
if TYPE_CHECKING:
assert value_convert is not None
self._attr_current_option = value_convert(value)
@ -132,7 +132,7 @@ class MatterMapSelectEntity(MatterAttributeSelectEntity):
self._attr_options = [
mapped_value
for value in available_values
if (mapped_value := self.entity_description.measurement_to_ha(value))
if (mapped_value := self.entity_description.device_to_ha(value))
]
# use base implementation from MatterAttributeSelectEntity to set the current option
super()._update_from_device()
@ -333,13 +333,13 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.CONFIG,
translation_key="startup_on_off",
options=["on", "off", "toggle", "previous"],
measurement_to_ha={
device_to_ha={
0: "off",
1: "on",
2: "toggle",
None: "previous",
}.get,
ha_to_native_value={
ha_to_device={
"off": 0,
"on": 1,
"toggle": 2,
@ -358,12 +358,12 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.CONFIG,
translation_key="sensitivity_level",
options=["high", "standard", "low"],
measurement_to_ha={
device_to_ha={
0: "high",
1: "standard",
2: "low",
}.get,
ha_to_native_value={
ha_to_device={
"high": 0,
"standard": 1,
"low": 2,
@ -379,11 +379,11 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.CONFIG,
translation_key="temperature_display_mode",
options=["Celsius", "Fahrenheit"],
measurement_to_ha={
device_to_ha={
0: "Celsius",
1: "Fahrenheit",
}.get,
ha_to_native_value={
ha_to_device={
"Celsius": 0,
"Fahrenheit": 1,
}.get,
@ -432,8 +432,8 @@ DISCOVERY_SCHEMAS = [
key="MatterLaundryWasherNumberOfRinses",
translation_key="laundry_washer_number_of_rinses",
list_attribute=clusters.LaundryWasherControls.Attributes.SupportedRinses,
measurement_to_ha=NUMBER_OF_RINSES_STATE_MAP.get,
ha_to_native_value=NUMBER_OF_RINSES_STATE_MAP_REVERSE.get,
device_to_ha=NUMBER_OF_RINSES_STATE_MAP.get,
ha_to_device=NUMBER_OF_RINSES_STATE_MAP_REVERSE.get,
),
entity_class=MatterMapSelectEntity,
required_attributes=(
@ -450,13 +450,13 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.CONFIG,
translation_key="door_lock_sound_volume",
options=["silent", "low", "medium", "high"],
measurement_to_ha={
device_to_ha={
0: "silent",
1: "low",
3: "medium",
2: "high",
}.get,
ha_to_native_value={
ha_to_device={
"silent": 0,
"low": 1,
"medium": 3,
@ -472,8 +472,8 @@ DISCOVERY_SCHEMAS = [
key="PumpConfigurationAndControlOperationMode",
translation_key="pump_operation_mode",
options=list(PUMP_OPERATION_MODE_MAP.values()),
measurement_to_ha=PUMP_OPERATION_MODE_MAP.get,
ha_to_native_value=PUMP_OPERATION_MODE_MAP_REVERSE.get,
device_to_ha=PUMP_OPERATION_MODE_MAP.get,
ha_to_device=PUMP_OPERATION_MODE_MAP_REVERSE.get,
),
entity_class=MatterAttributeSelectEntity,
required_attributes=(

@ -3,7 +3,7 @@

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, cast

from chip.clusters import Objects as clusters
@ -44,7 +44,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import slugify
from homeassistant.util import dt as dt_util, slugify

from .entity import MatterEntity, MatterEntityDescription
from .helpers import get_matter
@ -194,7 +194,7 @@ class MatterSensor(MatterEntity, SensorEntity):
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value in (None, NullValue):
value = None
elif value_convert := self.entity_description.measurement_to_ha:
elif value_convert := self.entity_description.device_to_ha:
value = value_convert(value)
self._attr_native_value = value

@ -296,7 +296,7 @@ DISCOVERY_SCHEMAS = [
key="TemperatureSensor",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
measurement_to_ha=lambda x: x / 100,
device_to_ha=lambda x: x / 100,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -308,7 +308,7 @@ DISCOVERY_SCHEMAS = [
key="PressureSensor",
native_unit_of_measurement=UnitOfPressure.KPA,
device_class=SensorDeviceClass.PRESSURE,
measurement_to_ha=lambda x: x / 10,
device_to_ha=lambda x: x / 10,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -320,7 +320,7 @@ DISCOVERY_SCHEMAS = [
key="FlowSensor",
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
translation_key="flow",
measurement_to_ha=lambda x: x / 10,
device_to_ha=lambda x: x / 10,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -332,7 +332,7 @@ DISCOVERY_SCHEMAS = [
key="HumiditySensor",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
measurement_to_ha=lambda x: x / 100,
device_to_ha=lambda x: x / 100,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -346,7 +346,7 @@ DISCOVERY_SCHEMAS = [
key="LightSensor",
native_unit_of_measurement=LIGHT_LUX,
device_class=SensorDeviceClass.ILLUMINANCE,
measurement_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
device_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -360,7 +360,7 @@ DISCOVERY_SCHEMAS = [
device_class=SensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
# value has double precision
measurement_to_ha=lambda x: int(x / 2),
device_to_ha=lambda x: int(x / 2),
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -402,7 +402,7 @@ DISCOVERY_SCHEMAS = [
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=[state for state in CHARGE_STATE_MAP.values() if state is not None],
measurement_to_ha=CHARGE_STATE_MAP.get,
device_to_ha=CHARGE_STATE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(clusters.PowerSource.Attributes.BatChargeState,),
@ -589,7 +589,7 @@ DISCOVERY_SCHEMAS = [
state_class=None,
# convert to set first to remove the duplicate unknown value
options=[x for x in AIR_QUALITY_MAP.values() if x is not None],
measurement_to_ha=lambda x: AIR_QUALITY_MAP[x],
device_to_ha=lambda x: AIR_QUALITY_MAP[x],
),
entity_class=MatterSensor,
required_attributes=(clusters.AirQuality.Attributes.AirQuality,),
@ -668,7 +668,7 @@ DISCOVERY_SCHEMAS = [
native_unit_of_measurement=UnitOfPower.WATT,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
measurement_to_ha=lambda x: x / 1000,
device_to_ha=lambda x: x / 1000,
),
entity_class=MatterSensor,
required_attributes=(
@ -685,7 +685,7 @@ DISCOVERY_SCHEMAS = [
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_display_precision=3,
state_class=SensorStateClass.TOTAL_INCREASING,
measurement_to_ha=lambda x: x / 1000,
device_to_ha=lambda x: x / 1000,
),
entity_class=MatterSensor,
required_attributes=(
@ -702,7 +702,7 @@ DISCOVERY_SCHEMAS = [
native_unit_of_measurement=UnitOfPower.WATT,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
measurement_to_ha=lambda x: x / 10,
device_to_ha=lambda x: x / 10,
),
entity_class=MatterSensor,
required_attributes=(NeoCluster.Attributes.Watt,),
@ -731,7 +731,7 @@ DISCOVERY_SCHEMAS = [
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
suggested_display_precision=0,
state_class=SensorStateClass.MEASUREMENT,
measurement_to_ha=lambda x: x / 10,
device_to_ha=lambda x: x / 10,
),
entity_class=MatterSensor,
required_attributes=(NeoCluster.Attributes.Voltage,),
@ -823,7 +823,7 @@ DISCOVERY_SCHEMAS = [
suggested_display_precision=3,
state_class=SensorStateClass.TOTAL_INCREASING,
# id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh)
measurement_to_ha=lambda x: x.energy,
device_to_ha=lambda x: x.energy,
),
entity_class=MatterSensor,
required_attributes=(
@ -842,7 +842,7 @@ DISCOVERY_SCHEMAS = [
suggested_display_precision=3,
state_class=SensorStateClass.TOTAL_INCREASING,
# id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh)
measurement_to_ha=lambda x: x.energy,
device_to_ha=lambda x: x.energy,
),
entity_class=MatterSensor,
required_attributes=(
@ -910,7 +910,7 @@ DISCOVERY_SCHEMAS = [
translation_key="contamination_state",
device_class=SensorDeviceClass.ENUM,
options=list(CONTAMINATION_STATE_MAP.values()),
measurement_to_ha=CONTAMINATION_STATE_MAP.get,
device_to_ha=CONTAMINATION_STATE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(clusters.SmokeCoAlarm.Attributes.ContaminationState,),
@ -922,7 +922,7 @@ DISCOVERY_SCHEMAS = [
translation_key="expiry_date",
device_class=SensorDeviceClass.TIMESTAMP,
# raw value is epoch seconds
measurement_to_ha=datetime.fromtimestamp,
device_to_ha=datetime.fromtimestamp,
),
entity_class=MatterSensor,
required_attributes=(clusters.SmokeCoAlarm.Attributes.ExpiryDate,),
@ -942,6 +942,21 @@ DISCOVERY_SCHEMAS = [
# don't discover this entry if the supported state list is empty
secondary_value_is_not=[],
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=MatterSensorEntityDescription(
key="OperationalStateCountdownTime",
translation_key="estimated_end_time",
device_class=SensorDeviceClass.TIMESTAMP,
state_class=None,
# Add countdown to current datetime to get the estimated end time
device_to_ha=(
lambda x: dt_util.utcnow() + timedelta(seconds=x) if x > 0 else None
),
),
entity_class=MatterSensor,
required_attributes=(clusters.OperationalState.Attributes.CountdownTime,),
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=MatterListSensorEntityDescription(
@ -993,7 +1008,7 @@ DISCOVERY_SCHEMAS = [
key="ThermostatLocalTemperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
measurement_to_ha=lambda x: x / 100,
device_to_ha=lambda x: x / 100,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
@ -1044,7 +1059,7 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
translation_key="window_covering_target_position",
measurement_to_ha=lambda x: round((10000 - x) / 100),
device_to_ha=lambda x: round((10000 - x) / 100),
native_unit_of_measurement=PERCENTAGE,
),
entity_class=MatterSensor,
@ -1060,7 +1075,7 @@ DISCOVERY_SCHEMAS = [
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=list(EVSE_FAULT_STATE_MAP.values()),
measurement_to_ha=EVSE_FAULT_STATE_MAP.get,
device_to_ha=EVSE_FAULT_STATE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(clusters.EnergyEvse.Attributes.FaultState,),
@ -1173,7 +1188,7 @@ DISCOVERY_SCHEMAS = [
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=list(ESA_STATE_MAP.values()),
measurement_to_ha=ESA_STATE_MAP.get,
device_to_ha=ESA_STATE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(clusters.DeviceEnergyManagement.Attributes.ESAState,),
@ -1186,7 +1201,7 @@ DISCOVERY_SCHEMAS = [
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=list(DEM_OPT_OUT_STATE_MAP.values()),
measurement_to_ha=DEM_OPT_OUT_STATE_MAP.get,
device_to_ha=DEM_OPT_OUT_STATE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(clusters.DeviceEnergyManagement.Attributes.OptOutState,),
@ -1200,7 +1215,7 @@ DISCOVERY_SCHEMAS = [
options=[
mode for mode in PUMP_CONTROL_MODE_MAP.values() if mode is not None
],
measurement_to_ha=PUMP_CONTROL_MODE_MAP.get,
device_to_ha=PUMP_CONTROL_MODE_MAP.get,
),
entity_class=MatterSensor,
required_attributes=(

@ -83,8 +83,8 @@
"evse_plug": {
"name": "Plug state"
},
"evse_supply_charging_state": {
"name": "Supply charging state"
"evse_supply_state": {
"name": "Charger supply state"
},
"boost_state": {
"name": "Boost state"
@ -193,7 +193,7 @@
"name": "Occupied to unoccupied delay"
},
"auto_relock_timer": {
"name": "Automatic relock timer"
"name": "Autorelock time"
}
},
"light": {
@ -318,6 +318,9 @@
"docked": "Docked"
}
},
"estimated_end_time": {
"name": "Estimated end time"
},
"switch_current_position": {
"name": "Current switch position"
},

@ -95,7 +95,7 @@ class MatterGenericCommandSwitch(MatterSwitch):
def _update_from_device(self) -> None:
"""Update from device."""
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value_convert := self.entity_description.measurement_to_ha:
if value_convert := self.entity_description.device_to_ha:
value = value_convert(value)
self._attr_is_on = value

@ -141,7 +141,7 @@ class MatterNumericSwitch(MatterSwitch):

async def _async_set_native_value(self, value: bool) -> None:
"""Update the current value."""
if value_convert := self.entity_description.ha_to_native_value:
if value_convert := self.entity_description.ha_to_device:
send_value = value_convert(value)
await self.write_attribute(
value=send_value,
@ -159,7 +159,7 @@ class MatterNumericSwitch(MatterSwitch):
def _update_from_device(self) -> None:
"""Update from device."""
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value_convert := self.entity_description.measurement_to_ha:
if value_convert := self.entity_description.device_to_ha:
value = value_convert(value)
self._attr_is_on = value

@ -248,11 +248,11 @@ DISCOVERY_SCHEMAS = [
key="EveTrvChildLock",
entity_category=EntityCategory.CONFIG,
translation_key="child_lock",
measurement_to_ha={
device_to_ha={
0: False,
1: True,
}.get,
ha_to_native_value={
ha_to_device={
False: 0,
True: 1,
}.get,
@ -275,7 +275,7 @@ DISCOVERY_SCHEMAS = [
),
off_command=clusters.EnergyEvse.Commands.Disable,
command_timeout=3000,
measurement_to_ha=EVSE_SUPPLY_STATE_MAP.get,
device_to_ha=EVSE_SUPPLY_STATE_MAP.get,
),
entity_class=MatterGenericCommandSwitch,
required_attributes=(

@ -80,7 +80,7 @@ class LocalSource(MediaSource):
path = self.async_full_path(source_dir_id, location)
mime_type, _ = mimetypes.guess_type(str(path))
assert isinstance(mime_type, str)
return PlayMedia(f"/media/{item.identifier}", mime_type)
return PlayMedia(f"/media/{item.identifier}", mime_type, path=path)

async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource:
"""Return media."""

@ -2,7 +2,7 @@

from __future__ import annotations

from dataclasses import dataclass
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
@ -10,6 +10,9 @@ from homeassistant.core import HomeAssistant, callback

from .const import MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX

if TYPE_CHECKING:
from pathlib import Path


@dataclass(slots=True)
class PlayMedia:
@ -17,6 +20,7 @@ class PlayMedia:

url: str
mime_type: str
path: Path | None = field(kw_only=True, default=None)


class BrowseMediaSource(BrowseMedia):
@ -45,6 +49,16 @@ class MediaSourceItem:
identifier: str
target_media_player: str | None

@property
def media_source_id(self) -> str:
"""Return the media source ID."""
uri = URI_SCHEME
if self.domain:
uri += self.domain
if self.identifier:
uri += f"/{self.identifier}"
return uri

async def async_browse(self) -> BrowseMediaSource:
"""Browse this item."""
if self.domain is None:

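A quick standalone sketch of the new media_source_id property added above, with the logic lifted straight from the diff. URI_SCHEME is assumed to be "media-source://", matching the constant imported from .const:

URI_SCHEME = "media-source://"  # assumed value of the imported constant

def media_source_id(domain: str | None, identifier: str | None) -> str:
    # Mirrors the property: scheme, then optional domain, then optional identifier.
    uri = URI_SCHEME
    if domain:
        uri += domain
    if identifier:
        uri += f"/{identifier}"
    return uri

assert media_source_id("local", "recordings/clip.mp4") == "media-source://local/recordings/clip.mp4"
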
@ -134,7 +134,7 @@ class MediaroomDevice(MediaPlayerEntity):

state_map = {
State.OFF: MediaPlayerState.OFF,
State.STANDBY: MediaPlayerState.STANDBY,
State.STANDBY: MediaPlayerState.IDLE,
State.PLAYING_LIVE_TV: MediaPlayerState.PLAYING,
State.PLAYING_RECORDED_TV: MediaPlayerState.PLAYING,
State.PLAYING_TIMESHIFT_TV: MediaPlayerState.PLAYING,
@ -155,7 +155,7 @@ class MediaroomDevice(MediaPlayerEntity):
self._channel = None
self._optimistic = optimistic
self._attr_state = (
MediaPlayerState.PLAYING if optimistic else MediaPlayerState.STANDBY
MediaPlayerState.PLAYING if optimistic else MediaPlayerState.IDLE
)
self._name = f"Mediaroom {device_id if device_id else host}"
self._available = True
@ -254,7 +254,7 @@ class MediaroomDevice(MediaPlayerEntity):
try:
self.set_state(await self.stb.turn_off())
if self._optimistic:
self._attr_state = MediaPlayerState.STANDBY
self._attr_state = MediaPlayerState.IDLE
self._available = True
except PyMediaroomError:
self._available = False

@ -1314,7 +1314,7 @@ class PlatePowerStep(MieleEnum):
plate_step_11 = 11
plate_step_12 = 12
plate_step_13 = 13
plate_step_14 = 4
plate_step_14 = 14
plate_step_15 = 15
plate_step_16 = 16
plate_step_17 = 17

@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/motion_blinds",
"iot_class": "local_push",
"loggers": ["motionblinds"],
"requirements": ["motionblinds==0.6.28"]
"requirements": ["motionblinds==0.6.29"]
}

@ -12,3 +12,4 @@ PLATFORMS = [Platform.SENSOR]
DEFAULT_NAME = "Nord Pool"

CONF_AREAS = "areas"
ATTR_RESOLUTION = "resolution"

@ -42,6 +42,9 @@
"services": {
"get_prices_for_date": {
"service": "mdi:cash-multiple"
},
"get_price_indices_for_date": {
"service": "mdi:cash-multiple"
}
}
}

@ -2,16 +2,21 @@

from __future__ import annotations

from collections.abc import Callable
from datetime import date, datetime
from functools import partial
import logging
from typing import TYPE_CHECKING

from pynordpool import (
AREAS,
Currency,
DeliveryPeriodData,
NordPoolAuthenticationError,
NordPoolClient,
NordPoolEmptyResponseError,
NordPoolError,
PriceIndicesData,
)
import voluptuous as vol

@ -32,7 +37,7 @@ from homeassistant.util.json import JsonValueType

if TYPE_CHECKING:
from . import NordPoolConfigEntry
from .const import DOMAIN
from .const import ATTR_RESOLUTION, DOMAIN

_LOGGER = logging.getLogger(__name__)
ATTR_CONFIG_ENTRY = "config_entry"
@ -40,6 +45,7 @@ ATTR_AREAS = "areas"
ATTR_CURRENCY = "currency"

SERVICE_GET_PRICES_FOR_DATE = "get_prices_for_date"
SERVICE_GET_PRICE_INDICES_FOR_DATE = "get_price_indices_for_date"
SERVICE_GET_PRICES_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
@ -50,6 +56,13 @@ SERVICE_GET_PRICES_SCHEMA = vol.Schema(
),
}
)
SERVICE_GET_PRICE_INDICES_SCHEMA = SERVICE_GET_PRICES_SCHEMA.extend(
{
vol.Optional(ATTR_RESOLUTION, default=60): vol.All(
cv.positive_int, vol.All(vol.Coerce(int), vol.In((15, 30, 60)))
),
}
)


def get_config_entry(hass: HomeAssistant, entry_id: str) -> NordPoolConfigEntry:
@ -71,11 +84,13 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> NordPoolConfigEntry:
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for Nord Pool integration."""

async def get_prices_for_date(call: ServiceCall) -> ServiceResponse:
"""Get price service."""
def get_service_params(
call: ServiceCall,
) -> tuple[NordPoolClient, date, str, list[str], int]:
"""Return the parameters for the service."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
asked_date: date = call.data[ATTR_DATE]
client = entry.runtime_data.client
asked_date: date = call.data[ATTR_DATE]

areas: list[str] = entry.data[ATTR_AREAS]
if _areas := call.data.get(ATTR_AREAS):
@ -85,14 +100,55 @@ def async_setup_services(hass: HomeAssistant) -> None:
if _currency := call.data.get(ATTR_CURRENCY):
currency = _currency

resolution: int = 60
if _resolution := call.data.get(ATTR_RESOLUTION):
resolution = _resolution

areas = [area.upper() for area in areas]
currency = currency.upper()

return (client, asked_date, currency, areas, resolution)

async def get_prices_for_date(
client: NordPoolClient,
asked_date: date,
currency: str,
areas: list[str],
resolution: int,
) -> DeliveryPeriodData:
"""Get prices."""
return await client.async_get_delivery_period(
datetime.combine(asked_date, dt_util.utcnow().time()),
Currency(currency),
areas,
)

async def get_price_indices_for_date(
client: NordPoolClient,
asked_date: date,
currency: str,
areas: list[str],
resolution: int,
) -> PriceIndicesData:
"""Get prices."""
return await client.async_get_price_indices(
datetime.combine(asked_date, dt_util.utcnow().time()),
Currency(currency),
areas,
resolution=resolution,
)

async def get_prices(func: Callable, call: ServiceCall) -> ServiceResponse:
"""Get price service."""
client, asked_date, currency, areas, resolution = get_service_params(call)

try:
price_data = await client.async_get_delivery_period(
datetime.combine(asked_date, dt_util.utcnow().time()),
Currency(currency),
price_data = await func(
client,
asked_date,
currency,
areas,
resolution,
)
except NordPoolAuthenticationError as error:
raise ServiceValidationError(
@ -122,7 +178,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
hass.services.async_register(
DOMAIN,
SERVICE_GET_PRICES_FOR_DATE,
get_prices_for_date,
partial(get_prices, get_prices_for_date),
schema=SERVICE_GET_PRICES_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_GET_PRICE_INDICES_FOR_DATE,
partial(get_prices, get_price_indices_for_date),
schema=SERVICE_GET_PRICE_INDICES_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

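The refactor above funnels both services through one shared get_prices handler, and each registration binds its fetch coroutine with functools.partial. A minimal sketch of that dispatch pattern, using toy coroutines rather than the Nord Pool API:

import asyncio
from functools import partial

async def fetch_prices(query: str) -> str:
    return f"prices for {query}"

async def fetch_indices(query: str) -> str:
    return f"indices for {query}"

async def handler(func, query: str) -> str:
    # Shared parameter parsing and error handling would live here.
    return await func(query)

# Each service registers a partial that fixes the fetch function:
handle_prices = partial(handler, fetch_prices)
handle_indices = partial(handler, fetch_indices)

assert asyncio.run(handle_prices("2025-07-01")) == "prices for 2025-07-01"
assert asyncio.run(handle_indices("2025-07-01")) == "indices for 2025-07-01"
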
@ -46,3 +46,59 @@ get_prices_for_date:
- "PLN"
- "SEK"
mode: dropdown
get_price_indices_for_date:
fields:
config_entry:
required: true
selector:
config_entry:
integration: nordpool
date:
required: true
selector:
date:
areas:
selector:
select:
options:
- "EE"
- "LT"
- "LV"
- "AT"
- "BE"
- "FR"
- "GER"
- "NL"
- "PL"
- "DK1"
- "DK2"
- "FI"
- "NO1"
- "NO2"
- "NO3"
- "NO4"
- "NO5"
- "SE1"
- "SE2"
- "SE3"
- "SE4"
- "SYS"
mode: dropdown
currency:
selector:
select:
options:
- "DKK"
- "EUR"
- "NOK"
- "PLN"
- "SEK"
mode: dropdown
resolution:
selector:
select:
options:
- "15"
- "30"
- "60"
mode: dropdown

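For reference, a hypothetical call to the new service from Python; the config entry ID and all values are placeholders, and hass is assumed to be a running HomeAssistant instance (the service is registered with SupportsResponse.ONLY, so return_response=True is required):

async def fetch_indices(hass):
    return await hass.services.async_call(
        "nordpool",
        "get_price_indices_for_date",
        {
            "config_entry": "abc123",  # placeholder config entry ID
            "date": "2025-07-01",
            "areas": ["SE3"],
            "currency": "SEK",
            "resolution": 15,  # one of 15, 30, 60
        },
        blocking=True,
        return_response=True,
    )
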
@ -103,7 +103,7 @@
},
"date": {
"name": "Date",
"description": "Only dates two months in the past and one day in the future is allowed."
"description": "Only dates in the range from two months in the past to one day in the future are allowed."
},
"areas": {
"name": "Areas",
@ -114,6 +114,32 @@
"description": "Currency to get prices in. If left empty it will use the currency already configured."
}
}
},
"get_price_indices_for_date": {
"name": "Get price indices for date",
"description": "Retrieves the price indices for a specific date.",
"fields": {
"config_entry": {
"name": "[%key:component::nordpool::services::get_prices_for_date::fields::config_entry::name%]",
"description": "[%key:component::nordpool::services::get_prices_for_date::fields::config_entry::description%]"
},
"date": {
"name": "[%key:component::nordpool::services::get_prices_for_date::fields::date::name%]",
"description": "[%key:component::nordpool::services::get_prices_for_date::fields::date::description%]"
},
"areas": {
"name": "[%key:component::nordpool::services::get_prices_for_date::fields::areas::name%]",
"description": "[%key:component::nordpool::services::get_prices_for_date::fields::areas::description%]"
},
"currency": {
"name": "[%key:component::nordpool::services::get_prices_for_date::fields::currency::name%]",
"description": "[%key:component::nordpool::services::get_prices_for_date::fields::currency::description%]"
},
"resolution": {
"name": "Resolution",
"description": "Resolution time for the prices, can be any of 15, 30 and 60 minutes."
}
}
}
},
"exceptions": {

@ -28,6 +28,7 @@ from .const import (
CONF_NUM_CTX,
CONF_PROMPT,
CONF_THINK,
DEFAULT_AI_TASK_NAME,
DEFAULT_NAME,
DEFAULT_TIMEOUT,
DOMAIN,
@ -47,7 +48,7 @@ __all__ = [
]

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = (Platform.CONVERSATION,)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)

type OllamaConfigEntry = ConfigEntry[ollama.AsyncClient]

@ -118,6 +119,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
parent_entry = api_keys_entries[entry.data[CONF_URL]]

hass.config_entries.async_add_subentry(parent_entry, subentry)

conversation_entity = entity_registry.async_get_entity_id(
"conversation",
DOMAIN,
@ -208,6 +210,31 @@ async def async_migrate_entry(hass: HomeAssistant, entry: OllamaConfigEntry) ->
minor_version=1,
)

if entry.version == 3 and entry.minor_version == 1:
# Add AI Task subentry with default options. We can only create a new
# subentry if we can find an existing model in the entry. The model
# was removed in the previous migration step, so we need to
# check the subentries for an existing model.
existing_model = next(
iter(
model
for subentry in entry.subentries.values()
if (model := subentry.data.get(CONF_MODEL)) is not None
),
None,
)
if existing_model:
hass.config_entries.async_add_subentry(
entry,
ConfigSubentry(
data=MappingProxyType({CONF_MODEL: existing_model}),
subentry_type="ai_task_data",
title=DEFAULT_AI_TASK_NAME,
unique_id=None,
),
)
hass.config_entries.async_update_entry(entry, minor_version=2)

_LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)

77
homeassistant/components/ollama/ai_task.py
Normal file
@ -0,0 +1,77 @@
"""AI Task integration for Ollama."""

from __future__ import annotations

from json import JSONDecodeError
import logging

from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from .entity import OllamaBaseLLMEntity

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""
for subentry in config_entry.subentries.values():
if subentry.subentry_type != "ai_task_data":
continue

async_add_entities(
[OllamaTaskEntity(config_entry, subentry)],
config_subentry_id=subentry.subentry_id,
)


class OllamaTaskEntity(
ai_task.AITaskEntity,
OllamaBaseLLMEntity,
):
"""Ollama AI Task entity."""

_attr_supported_features = ai_task.AITaskEntityFeature.GENERATE_DATA

async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(chat_log, task.structure)

if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)

text = chat_log.content[-1].content or ""

if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with Ollama structured response") from err

return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)
@ -46,6 +46,8 @@ from .const import (
CONF_NUM_CTX,
CONF_PROMPT,
CONF_THINK,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DEFAULT_KEEP_ALIVE,
DEFAULT_MAX_HISTORY,
DEFAULT_MODEL,
@ -74,7 +76,7 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ollama."""

VERSION = 3
MINOR_VERSION = 1
MINOR_VERSION = 2

def __init__(self) -> None:
"""Initialize config flow."""
@ -136,11 +138,14 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {"conversation": ConversationSubentryFlowHandler}
return {
"conversation": OllamaSubentryFlowHandler,
"ai_task_data": OllamaSubentryFlowHandler,
}


class ConversationSubentryFlowHandler(ConfigSubentryFlow):
"""Flow for managing conversation subentries."""
class OllamaSubentryFlowHandler(ConfigSubentryFlow):
"""Flow for managing Ollama subentries."""

def __init__(self) -> None:
"""Initialize the subentry flow."""
@ -201,7 +206,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
step_id="set_options",
data_schema=vol.Schema(
ollama_config_option_schema(
self.hass, self._is_new, options, models_to_list
self.hass,
self._is_new,
self._subentry_type,
options,
models_to_list,
)
),
)
@ -300,13 +309,19 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
def ollama_config_option_schema(
hass: HomeAssistant,
is_new: bool,
subentry_type: str,
options: Mapping[str, Any],
models_to_list: list[SelectOptionDict],
) -> dict:
"""Ollama options schema."""
if is_new:
if subentry_type == "ai_task_data":
default_name = DEFAULT_AI_TASK_NAME
else:
default_name = DEFAULT_CONVERSATION_NAME

schema: dict = {
vol.Required(CONF_NAME, default="Ollama Conversation"): str,
vol.Required(CONF_NAME, default=default_name): str,
}
else:
schema = {}
@ -319,29 +334,38 @@ def ollama_config_option_schema(
): SelectSelector(
SelectSelectorConfig(options=models_to_list, custom_value=True)
),
vol.Optional(
CONF_PROMPT,
description={
"suggested_value": options.get(
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
)
},
): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},
): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(
label=api.name,
value=api.id,
}
)
if subentry_type == "conversation":
schema.update(
{
vol.Optional(
CONF_PROMPT,
description={
"suggested_value": options.get(
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
)
for api in llm.async_get_apis(hass)
],
multiple=True,
)
),
},
): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},
): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(
label=api.name,
value=api.id,
)
for api in llm.async_get_apis(hass)
],
multiple=True,
)
),
}
)
schema.update(
{
vol.Optional(
CONF_NUM_CTX,
description={

@ -159,3 +159,10 @@ MODEL_NAMES = [ # https://ollama.com/library
"zephyr",
]
DEFAULT_MODEL = "llama3.2:latest"

DEFAULT_CONVERSATION_NAME = "Ollama Conversation"
DEFAULT_AI_TASK_NAME = "Ollama AI Task"

RECOMMENDED_CONVERSATION_OPTIONS = {
CONF_MAX_HISTORY: DEFAULT_MAX_HISTORY,
}

@ -8,6 +8,7 @@ import logging
from typing import Any

import ollama
import voluptuous as vol
from voluptuous_openapi import convert

from homeassistant.components import conversation
@ -180,6 +181,7 @@ class OllamaBaseLLMEntity(Entity):
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure: vol.Schema | None = None,
) -> None:
"""Generate an answer for the chat log."""
settings = {**self.entry.data, **self.subentry.data}
@ -200,6 +202,17 @@ class OllamaBaseLLMEntity(Entity):
max_messages = int(settings.get(CONF_MAX_HISTORY, DEFAULT_MAX_HISTORY))
self._trim_history(message_history, max_messages)

output_format: dict[str, Any] | None = None
if structure:
output_format = convert(
structure,
custom_serializer=(
chat_log.llm_api.custom_serializer
if chat_log.llm_api
else llm.selector_serializer
),
)

# Get response
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
@ -214,6 +227,7 @@ class OllamaBaseLLMEntity(Entity):
keep_alive=f"{settings.get(CONF_KEEP_ALIVE, DEFAULT_KEEP_ALIVE)}s",
options={CONF_NUM_CTX: settings.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)},
think=settings.get(CONF_THINK),
format=output_format,
)
except (ollama.RequestError, ollama.ResponseError) as err:
_LOGGER.error("Unexpected error talking to Ollama server: %s", err)

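A minimal sketch of the structured-output plumbing added above: a voluptuous schema is converted into a JSON-schema-style dict with voluptuous_openapi.convert and handed to Ollama as the format argument. The schema here is a toy; the real code also threads through a custom serializer when an LLM API is attached to the chat log:

import voluptuous as vol
from voluptuous_openapi import convert

structure = vol.Schema(
    {
        vol.Required("name"): str,
        vol.Optional("age"): int,
    }
)
output_format = convert(structure)
# output_format is now a dict along the lines of:
# {"type": "object",
#  "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
#  "required": ["name"]}
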
@ -55,6 +55,44 @@
"progress": {
"download": "Please wait while the model is downloaded, which may take a very long time. Check your Ollama server logs for more details."
}
},
"ai_task_data": {
"initiate_flow": {
"user": "Add Generate data with AI service",
"reconfigure": "Reconfigure Generate data with AI service"
},
"entry_type": "Generate data with AI service",
"step": {
"set_options": {
"data": {
"model": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::model%]",
"name": "[%key:common::config_flow::data::name%]",
"prompt": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::prompt%]",
"max_history": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::max_history%]",
"num_ctx": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::num_ctx%]",
"keep_alive": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::keep_alive%]",
"think": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::think%]"
},
"data_description": {
"prompt": "[%key:component::ollama::config_subentries::conversation::step::set_options::data_description::prompt%]",
"keep_alive": "[%key:component::ollama::config_subentries::conversation::step::set_options::data_description::keep_alive%]",
"num_ctx": "[%key:component::ollama::config_subentries::conversation::step::set_options::data_description::num_ctx%]",
"think": "[%key:component::ollama::config_subentries::conversation::step::set_options::data_description::think%]"
}
},
"download": {
"title": "[%key:component::ollama::config_subentries::conversation::step::download::title%]"
}
},
"abort": {
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"entry_not_loaded": "[%key:component::ollama::config_subentries::conversation::abort::entry_not_loaded%]",
"download_failed": "[%key:component::ollama::config_subentries::conversation::abort::download_failed%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"progress": {
"download": "[%key:component::ollama::config_subentries::conversation::progress::download%]"
}
}
}
}

@ -26,6 +26,7 @@ from .const import CONF_TOTP_SECRET, CONF_UTILITY, DOMAIN

_LOGGER = logging.getLogger(__name__)


STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_UTILITY): vol.In(get_supported_utility_names()),
@ -88,9 +89,15 @@ class OpowerConfigFlow(ConfigFlow, domain=DOMAIN):
errors = await _validate_login(self.hass, user_input)
if not errors:
return self._async_create_opower_entry(user_input)

else:
user_input = {}
user_input.pop(CONF_PASSWORD, None)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
)

async def async_step_mfa(