Mirror of https://github.com/home-assistant/core.git (synced 2025-07-17 02:07:09 +00:00)

Merge branch 'dev' into scop-huawei-lte-diags-support

Commit 7a1230e24e
@@ -535,6 +535,7 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*

CODEOWNERS (generated, 6 changes)

@@ -1658,6 +1658,8 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco

@@ -1756,8 +1758,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
@@ -33,7 +33,7 @@ from .const import (
)
from .entity import AITaskEntity
from .http import async_setup as async_setup_http
from .task import GenDataTask, GenDataTaskResult, PlayMediaWithId, async_generate_data
from .task import GenDataTask, GenDataTaskResult, async_generate_data

__all__ = [
"DOMAIN",

@@ -41,7 +41,6 @@ __all__ = [
"AITaskEntityFeature",
"GenDataTask",
"GenDataTaskResult",
"PlayMediaWithId",
"async_generate_data",
"async_setup",
"async_setup_entry",
@@ -13,7 +13,7 @@ from homeassistant.components.conversation (
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
session: ChatSession,
task: GenDataTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_session(self.hass) as session,
async_get_chat_log(
self.hass,
session,

@@ -79,19 +79,22 @@ class AITaskEntity(RestoreEntity):
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
)

chat_log.async_add_user_content(UserContent(task.instructions))
chat_log.async_add_user_content(
UserContent(task.instructions, attachments=task.attachments)
)

yield chat_log

@final
async def internal_async_generate_data(
self,
session: ChatSession,
task: GenDataTask,
) -> GenDataTaskResult:
"""Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(task) as chat_log:
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
return await self._async_generate_data(task, chat_log)

async def _async_generate_data(
@@ -1,6 +1,7 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",

@@ -10,6 +10,7 @@ generate_data:
required: true
selector:
text:
multiline: true
entity_id:
required: false
selector:
@@ -2,28 +2,31 @@

from __future__ import annotations

from dataclasses import dataclass, fields
from dataclasses import dataclass
import mimetypes
from pathlib import Path
import tempfile
from typing import Any

import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.core import HomeAssistant
from homeassistant.components import camera, conversation, media_source
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import async_get_chat_session

from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


@dataclass(slots=True)
class PlayMediaWithId(media_source.PlayMedia):
"""Play media with a media content ID."""

media_content_id: str
"""Media source ID to play."""

def __str__(self) -> str:
"""Return media source ID as a string."""
return f"<PlayMediaWithId {self.media_content_id}>"
def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)


async def async_generate_data(

@@ -52,38 +55,79 @@ async def async_generate_data(
)

# Resolve attachments
resolved_attachments: list[PlayMediaWithId] | None = None
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []

if attachments:
if AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features:
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)

for attachment in attachments or []:
media_content_id = attachment["media_content_id"]

# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")

# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)

temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)

resolved_attachments = []

for attachment in attachments:
media = await media_source.async_resolve_media(
hass, attachment["media_content_id"], None
)
resolved_attachments.append(
PlayMediaWithId(
**{
field.name: getattr(media, field.name)
for field in fields(media)
},
media_content_id=attachment["media_content_id"],
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)

return await entity.internal_async_generate_data(
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments,
with async_get_chat_session(hass) as session:
if created_files:

def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)

@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)

session.async_on_cleanup(cleanup_files_callback)

return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
)
)


@dataclass(slots=True)

@@ -99,7 +143,7 @@ class GenDataTask:
structure: vol.Schema | None = None
"""Optional structure for the data to be generated."""

attachments: list[PlayMediaWithId] | None = None
attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""

def __str__(self) -> str:
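Note: a minimal sketch, assuming a hypothetical entity subclass, of how an AI Task entity implementation sees the attachments resolved in the hunks above. The _async_generate_data signature and the Attachment fields (media_content_id, mime_type, path) come from the diff; the class itself and its behaviour are illustrative only, not part of this commit.

from homeassistant.components.ai_task import AITaskEntity


class ExampleAITaskEntity(AITaskEntity):
    """Hypothetical entity, shown only to illustrate the new attachment handling."""

    async def _async_generate_data(self, task, chat_log):
        # After this change, task.attachments is list[conversation.Attachment] | None.
        paths = []
        for attachment in task.attachments or []:
            # Each attachment keeps the original media_content_id and exposes a
            # mime_type plus a local path (camera sources are snapshotted first).
            paths.append(attachment.path)
        ...  # a real implementation feeds these to its model and returns a GenDataTaskResult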
@@ -6,6 +6,5 @@ CONF_RETURN_AVERAGE: Final = "return_average"
CONF_CLIP_NEGATIVE: Final = "clip_negatives"
DOMAIN: Final = "airq"
MANUFACTURER: Final = "CorantGmbH"
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
UPDATE_INTERVAL: float = 10.0

@@ -4,9 +4,6 @@
"health_index": {
"default": "mdi:heart-pulse"
},
"absolute_humidity": {
"default": "mdi:water"
},
"oxygen": {
"default": "mdi:leaf"
},

@@ -14,6 +14,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import (
CONCENTRATION_GRAMS_PER_CUBIC_METER,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,

@@ -28,10 +29,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AirQConfigEntry, AirQCoordinator
from .const import (
ACTIVITY_BECQUEREL_PER_CUBIC_METER,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
)
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER

_LOGGER = logging.getLogger(__name__)

@@ -195,7 +193,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
),
AirQEntityDescription(
key="humidity_abs",
translation_key="absolute_humidity",
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("humidity_abs"),

@@ -93,9 +93,6 @@
"health_index": {
"name": "Health index"
},
"absolute_humidity": {
"name": "Absolute humidity"
},
"hydrogen": {
"name": "Hydrogen"
},

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.12"]
"requirements": ["aioairzone-cloud==0.6.13"]
}
@@ -505,8 +505,13 @@ class ClimateCapabilities(AlexaEntity):
):
yield AlexaThermostatController(self.hass, self.entity)
yield AlexaTemperatureSensor(self.hass, self.entity)
if self.entity.domain == water_heater.DOMAIN and (
supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
if (
self.entity.domain == water_heater.DOMAIN
and (
supported_features
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
)
and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
):
yield AlexaModeController(
self.entity,

@@ -634,7 +639,9 @@ class FanCapabilities(AlexaEntity):
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
)
force_range_controller = False
if supported & fan.FanEntityFeature.PRESET_MODE:
if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get(
fan.ATTR_PRESET_MODES
):
yield AlexaModeController(
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
)

@@ -672,7 +679,11 @@ class RemoteCapabilities(AlexaEntity):
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
if (
activities
and (supported & remote.RemoteEntityFeature.ACTIVITY)
and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
):
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)

@@ -692,7 +703,9 @@ class HumidifierCapabilities(AlexaEntity):
"""Yield the supported interfaces."""
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if supported & humidifier.HumidifierEntityFeature.MODES:
if (
supported & humidifier.HumidifierEntityFeature.MODES
) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
yield AlexaModeController(
self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
)
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "silver",
"requirements": ["aioamazondevices==3.2.8"]
"requirements": ["aioamazondevices==3.2.10"]
}

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["amcrest"],
"quality_scale": "legacy",
"requirements": ["amcrest==1.9.8"]
"requirements": ["amcrest==1.9.9"]
}

@@ -10,9 +10,9 @@ DEFAULT_CONVERSATION_NAME = "Claude conversation"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307"
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
RECOMMENDED_MAX_TOKENS = 3000
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.16.0"],
"requirements": ["pyatv==0.16.1"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

@@ -15,12 +15,12 @@
],
"quality_scale": "internal",
"requirements": [
"bleak==0.22.3",
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bleak==1.0.1",
"bleak-retry-connector==4.0.0",
"bluetooth-adapters==2.0.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.43.0",
"habluetooth==3.49.0"
"habluetooth==4.0.1"
]
}

@@ -11,6 +11,7 @@ DOMAINS_AND_TYPES = {
Platform.SELECT: {"HYS"},
Platform.SENSOR: {
"A1",
"A2",
"MP1S",
"RM4MINI",
"RM4PRO",
@@ -10,6 +10,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
UnitOfElectricCurrent,
UnitOfElectricPotential,

@@ -34,6 +35,24 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
key="air_quality",
device_class=SensorDeviceClass.AQI,
),
SensorEntityDescription(
key="pm10",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM10,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="pm2_5",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="pm1",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM1,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="humidity",
native_unit_of_measurement=PERCENTAGE,

@@ -25,6 +25,7 @@ def get_update_manager(device: BroadlinkDevice[_ApiT]) -> BroadlinkUpdateManager
"""Return an update manager for a given Broadlink device."""
update_managers: dict[str, type[BroadlinkUpdateManager]] = {
"A1": BroadlinkA1UpdateManager,
"A2": BroadlinkA2UpdateManager,
"BG1": BroadlinkBG1UpdateManager,
"HYS": BroadlinkThermostatUpdateManager,
"LB1": BroadlinkLB1UpdateManager,

@@ -118,6 +119,16 @@ class BroadlinkA1UpdateManager(BroadlinkUpdateManager[blk.a1]):
return await self.device.async_request(self.device.api.check_sensors_raw)


class BroadlinkA2UpdateManager(BroadlinkUpdateManager[blk.a2]):
"""Manages updates for Broadlink A2 devices."""

SCAN_INTERVAL = timedelta(seconds=10)

async def async_fetch_data(self) -> dict[str, Any]:
"""Fetch data from the device."""
return await self.device.async_request(self.device.api.check_sensors_raw)


class BroadlinkMP1UpdateManager(BroadlinkUpdateManager[blk.mp1]):
"""Manages updates for Broadlink MP1 devices."""

@@ -8,7 +8,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"requirements": ["brother==4.3.1"],
"requirements": ["brother==5.0.0"],
"zeroconf": [
{
"type": "_printer._tcp.local.",
@@ -12,6 +12,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNA
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import CONF_PASSKEY, DEFAULT_PORT, DOMAIN

@@ -21,12 +22,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

host: str
port: int
mac: str
passkey: str | None = None
username: str | None = None
password: str | None = None
def __init__(self) -> None:
"""Initialize BSBLan flow."""
self.host: str | None = None
self.port: int = DEFAULT_PORT
self.mac: str | None = None
self.passkey: str | None = None
self.username: str | None = None
self.password: str | None = None
self._auth_required = True

async def async_step_user(
self, user_input: dict[str, Any] | None = None

@@ -41,9 +45,111 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)

return await self._validate_and_create()

async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle Zeroconf discovery."""

self.host = str(discovery_info.ip_address)
self.port = discovery_info.port or DEFAULT_PORT

# Get MAC from properties
self.mac = discovery_info.properties.get("mac")

# If MAC was found in zeroconf, use it immediately
if self.mac:
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)
else:
# MAC not available from zeroconf - check for existing host/port first
self._async_abort_entries_match(
{CONF_HOST: self.host, CONF_PORT: self.port}
)

# Try to get device info without authentication to minimize discovery popup
config = BSBLANConfig(host=self.host, port=self.port)
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(config, session)
try:
device = await bsblan.device()
except BSBLANError:
# Device requires authentication - proceed to discovery confirm
self.mac = None
else:
self.mac = device.MAC

# Got MAC without auth - set unique ID and check for existing device
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)
# No auth needed, so we can proceed to a confirmation step without fields
self._auth_required = False

# Proceed to get credentials
self.context["title_placeholders"] = {"name": f"BSBLAN {self.host}"}
return await self.async_step_discovery_confirm()

async def async_step_discovery_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle getting credentials for discovered device."""
if user_input is None:
data_schema = vol.Schema(
{
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
)
if not self._auth_required:
data_schema = vol.Schema({})

return self.async_show_form(
step_id="discovery_confirm",
data_schema=data_schema,
description_placeholders={"host": str(self.host)},
)

if not self._auth_required:
return self._async_create_entry()

self.passkey = user_input.get(CONF_PASSKEY)
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)

return await self._validate_and_create(is_discovery=True)

async def _validate_and_create(
self, is_discovery: bool = False
) -> ConfigFlowResult:
"""Validate device connection and create entry."""
try:
await self._get_bsblan_info()
await self._get_bsblan_info(is_discovery=is_discovery)
except BSBLANError:
if is_discovery:
return self.async_show_form(
step_id="discovery_confirm",
data_schema=vol.Schema(
{
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
),
errors={"base": "cannot_connect"},
description_placeholders={"host": str(self.host)},
)
return self._show_setup_form({"base": "cannot_connect"})

return self._async_create_entry()

@@ -67,6 +173,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):

@callback
def _async_create_entry(self) -> ConfigFlowResult:
"""Create the config entry."""
return self.async_create_entry(
title=format_mac(self.mac),
data={

@@ -78,8 +185,10 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
},
)

async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None:
"""Get device information from an BSBLAN device."""
async def _get_bsblan_info(
self, raise_on_progress: bool = True, is_discovery: bool = False
) -> None:
"""Get device information from a BSBLAN device."""
config = BSBLANConfig(
host=self.host,
passkey=self.passkey,

@@ -90,11 +199,18 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(config, session)
device = await bsblan.device()
self.mac = device.MAC
retrieved_mac = device.MAC

await self.async_set_unique_id(
format_mac(self.mac), raise_on_progress=raise_on_progress
)
# Handle unique ID assignment based on whether MAC was available from zeroconf
if not self.mac:
# MAC wasn't available from zeroconf, now we have it from API
self.mac = retrieved_mac
await self.async_set_unique_id(
format_mac(self.mac), raise_on_progress=raise_on_progress
)

# Always allow updating host/port for both user and discovery flows
# This ensures connectivity is maintained when devices change IP addresses
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
@@ -7,5 +7,11 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==2.1.0"]
"requirements": ["python-bsblan==2.1.0"],
"zeroconf": [
{
"type": "_http._tcp.local.",
"name": "bsb-lan*"
}
]
}

@@ -20,6 +20,8 @@ from . import BSBLanConfigEntry, BSBLanData
from .coordinator import BSBLanCoordinatorData
from .entity import BSBLanEntity

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class BSBLanSensorEntityDescription(SensorEntityDescription):

@@ -13,7 +13,25 @@
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"host": "The hostname or IP address of your BSB-Lan device."
"host": "The hostname or IP address of your BSB-Lan device.",
"port": "The port number of your BSB-Lan device.",
"passkey": "The passkey for your BSB-Lan device.",
"username": "The username for your BSB-Lan device.",
"password": "The password for your BSB-Lan device."
}
},
"discovery_confirm": {
"title": "BSB-Lan device discovered",
"description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
"data": {
"passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]",
"password": "[%key:component::bsblan::config::step::user::data_description::password%]"
}
}
},
@@ -34,6 +34,7 @@ from .agent_manager import (
from .chat_log import (
AssistantContent,
AssistantContentDeltaDict,
Attachment,
ChatLog,
Content,
ConverseError,

@@ -66,6 +67,7 @@ __all__ = [
"HOME_ASSISTANT_AGENT",
"AssistantContent",
"AssistantContentDeltaDict",
"Attachment",
"ChatLog",
"Content",
"ConversationEntity",

@@ -8,6 +8,7 @@ from contextlib import contextmanager
from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict

import voluptuous as vol

@@ -136,6 +137,21 @@ class UserContent:

role: Literal["user"] = field(init=False, default="user")
content: str
attachments: list[Attachment] | None = field(default=None)


@dataclass(frozen=True)
class Attachment:
"""Attachment for a chat message."""

media_content_id: str
"""Media content ID of the attachment."""

mime_type: str
"""MIME type of the attachment."""

path: Path
"""Path to the attachment on disk."""


@dataclass(frozen=True)
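A minimal sketch of the new chat-log dataclasses from the hunk above. The field names come from the diff and Attachment is re-exported by the conversation package per the __init__.py hunk; the concrete values and the assumption that UserContent is importable the same way are illustrative only.

from pathlib import Path

from homeassistant.components.conversation import Attachment, UserContent

# Illustrative values; real attachments are produced by ai_task.async_generate_data.
attachment = Attachment(
    media_content_id="media-source://camera/camera.front_door",  # hypothetical entity
    mime_type="image/jpeg",
    path=Path("/tmp/front_door.jpg"),  # hypothetical snapshot path
)
user_message = UserContent("Describe this image", attachments=[attachment])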
@@ -2,6 +2,8 @@

from __future__ import annotations

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_SOURCE, Platform
from homeassistant.core import HomeAssistant

@@ -9,12 +11,18 @@ from homeassistant.helpers.device import (
async_entity_id_to_device_id,
async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.helper_integration import async_handle_source_entity_changes
from homeassistant.helpers.helper_integration import (
async_handle_source_entity_changes,
async_remove_helper_config_entry_from_source_device,
)

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Derivative from a config entry."""

# This can be removed in HA Core 2026.2
async_remove_stale_devices_links_keep_entity_device(
hass, entry.entry_id, entry.options[CONF_SOURCE]
)

@@ -25,20 +33,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
options={**entry.options, CONF_SOURCE: source_entity_id},
)

async def source_entity_removed() -> None:
# The source entity has been removed, we need to clean the device links.
async_remove_stale_devices_links_keep_entity_device(hass, entry.entry_id, None)

entry.async_on_unload(
async_handle_source_entity_changes(
hass,
add_helper_config_entry_to_device=False,
helper_config_entry_id=entry.entry_id,
set_source_entity_id_or_uuid=set_source_entity_id_or_uuid,
source_device_id=async_entity_id_to_device_id(
hass, entry.options[CONF_SOURCE]
),
source_entity_id_or_uuid=entry.options[CONF_SOURCE],
source_entity_removed=source_entity_removed,
)
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))

@@ -54,3 +58,51 @@ async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""

_LOGGER.debug(
"Migrating configuration from version %s.%s",
config_entry.version,
config_entry.minor_version,
)

if config_entry.version > 1:
# This means the user has downgraded from a future version
return False

if config_entry.version == 1:
if config_entry.minor_version < 2:
new_options = {**config_entry.options}

if new_options.get("unit_prefix") == "none":
# Before we had support for optional selectors, "none" was used for selecting nothing
del new_options["unit_prefix"]

hass.config_entries.async_update_entry(
config_entry, options=new_options, version=1, minor_version=2
)

if config_entry.minor_version < 3:
# Remove the derivative config entry from the source device
if source_device_id := async_entity_id_to_device_id(
hass, config_entry.options[CONF_SOURCE]
):
async_remove_helper_config_entry_from_source_device(
hass,
helper_config_entry_id=config_entry.entry_id,
source_device_id=source_device_id,
)
hass.config_entries.async_update_entry(
config_entry, version=1, minor_version=3
)

_LOGGER.debug(
"Migration to configuration version %s.%s successful",
config_entry.version,
config_entry.minor_version,
)

return True

@@ -141,6 +141,9 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW

VERSION = 1
MINOR_VERSION = 3

def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""
return cast(str, options[CONF_NAME])
@@ -34,8 +34,7 @@ from homeassistant.core import (
callback,
)
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.device import async_entity_id_to_device
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,

@@ -118,30 +117,21 @@ async def async_setup_entry(
registry, config_entry.options[CONF_SOURCE]
)

device_info = async_device_info_to_link_from_entity(
hass,
source_entity_id,
)

if (unit_prefix := config_entry.options.get(CONF_UNIT_PREFIX)) == "none":
# Before we had support for optional selectors, "none" was used for selecting nothing
unit_prefix = None

if max_sub_interval_dict := config_entry.options.get(CONF_MAX_SUB_INTERVAL, None):
max_sub_interval = cv.time_period(max_sub_interval_dict)
else:
max_sub_interval = None

derivative_sensor = DerivativeSensor(
hass,
name=config_entry.title,
round_digits=int(config_entry.options[CONF_ROUND_DIGITS]),
source_entity=source_entity_id,
time_window=cv.time_period_dict(config_entry.options[CONF_TIME_WINDOW]),
unique_id=config_entry.entry_id,
unit_of_measurement=None,
unit_prefix=unit_prefix,
unit_prefix=config_entry.options.get(CONF_UNIT_PREFIX),
unit_time=config_entry.options[CONF_UNIT_TIME],
device_info=device_info,
max_sub_interval=max_sub_interval,
)

@@ -156,6 +146,7 @@ async def async_setup_platform(
) -> None:
"""Set up the derivative sensor."""
derivative = DerivativeSensor(
hass,
name=config.get(CONF_NAME),
round_digits=config[CONF_ROUND_DIGITS],
source_entity=config[CONF_SOURCE],

@@ -178,6 +169,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

def __init__(
self,
hass: HomeAssistant,
*,
name: str | None,
round_digits: int,

@@ -188,11 +180,13 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
unit_time: UnitOfTime,
max_sub_interval: timedelta | None,
unique_id: str | None,
device_info: DeviceInfo | None = None,
) -> None:
"""Initialize the derivative sensor."""
self._attr_unique_id = unique_id
self._attr_device_info = device_info
self.device_entry = async_entity_id_to_device(
hass,
source_entity,
)
self._sensor_source_id = source_entity
self._round_digits = round_digits
self._attr_native_value = round(Decimal(0), round_digits)
@@ -25,7 +25,8 @@ PLATFORMS: list[Platform] = [Platform.TTS]

async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None:
"""Get ElevenLabs model from their API by the model_id."""
models = await client.models.get_all()
models = await client.models.list()

for maybe_model in models:
if maybe_model.model_id == model_id:
return maybe_model

@@ -23,14 +23,12 @@ from . import ElevenLabsConfigEntry
from .const import (
CONF_CONFIGURE_VOICE,
CONF_MODEL,
CONF_OPTIMIZE_LATENCY,
CONF_SIMILARITY,
CONF_STABILITY,
CONF_STYLE,
CONF_USE_SPEAKER_BOOST,
CONF_VOICE,
DEFAULT_MODEL,
DEFAULT_OPTIMIZE_LATENCY,
DEFAULT_SIMILARITY,
DEFAULT_STABILITY,
DEFAULT_STYLE,

@@ -51,7 +49,8 @@ async def get_voices_models(
httpx_client = get_async_client(hass)
client = AsyncElevenLabs(api_key=api_key, httpx_client=httpx_client)
voices = (await client.voices.get_all()).voices
models = await client.models.get_all()
models = await client.models.list()

voices_dict = {
voice.voice_id: voice.name
for voice in sorted(voices, key=lambda v: v.name or "")

@@ -78,8 +77,13 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
try:
voices, _ = await get_voices_models(self.hass, user_input[CONF_API_KEY])
except ApiError:
errors["base"] = "invalid_api_key"
except ApiError as exc:
errors["base"] = "unknown"
details = getattr(exc, "body", {}).get("detail", {})
if details:
status = details.get("status")
if status == "invalid_api_key":
errors["base"] = "invalid_api_key"
else:
return self.async_create_entry(
title="ElevenLabs",

@@ -206,12 +210,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
vol.Coerce(float),
vol.Range(min=0, max=1),
),
vol.Optional(
CONF_OPTIMIZE_LATENCY,
default=self.config_entry.options.get(
CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
),
): vol.All(int, vol.Range(min=0, max=4)),
vol.Optional(
CONF_STYLE,
default=self.config_entry.options.get(CONF_STYLE, DEFAULT_STYLE),

@@ -7,7 +7,6 @@ CONF_MODEL = "model"
CONF_CONFIGURE_VOICE = "configure_voice"
CONF_STABILITY = "stability"
CONF_SIMILARITY = "similarity"
CONF_OPTIMIZE_LATENCY = "optimize_streaming_latency"
CONF_STYLE = "style"
CONF_USE_SPEAKER_BOOST = "use_speaker_boost"
DOMAIN = "elevenlabs"

@@ -15,6 +14,5 @@ DOMAIN = "elevenlabs"
DEFAULT_MODEL = "eleven_multilingual_v2"
DEFAULT_STABILITY = 0.5
DEFAULT_SIMILARITY = 0.75
DEFAULT_OPTIMIZE_LATENCY = 0
DEFAULT_STYLE = 0
DEFAULT_USE_SPEAKER_BOOST = True

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["elevenlabs"],
"requirements": ["elevenlabs==1.9.0"]
"requirements": ["elevenlabs==2.3.0"]
}
@@ -11,7 +11,8 @@
}
},
"error": {
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]"
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
"options": {

@@ -32,14 +33,12 @@
"data": {
"stability": "Stability",
"similarity": "Similarity",
"optimize_streaming_latency": "Latency",
"style": "Style",
"use_speaker_boost": "Speaker boost"
},
"data_description": {
"stability": "Stability of the generated audio. Higher values lead to less emotional audio.",
"similarity": "Similarity of the generated audio to the original voice. Higher values may result in more similar audio, but may also introduce background noise.",
"optimize_streaming_latency": "Optimize the model for streaming. This may reduce the quality of the generated audio.",
"style": "Style of the generated audio. Recommended to keep at 0 for most almost all use cases.",
"use_speaker_boost": "Use speaker boost to increase the similarity of the generated audio to the original voice."
}

@@ -25,13 +25,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import ElevenLabsConfigEntry
from .const import (
ATTR_MODEL,
CONF_OPTIMIZE_LATENCY,
CONF_SIMILARITY,
CONF_STABILITY,
CONF_STYLE,
CONF_USE_SPEAKER_BOOST,
CONF_VOICE,
DEFAULT_OPTIMIZE_LATENCY,
DEFAULT_SIMILARITY,
DEFAULT_STABILITY,
DEFAULT_STYLE,

@@ -75,9 +73,6 @@ async def async_setup_entry(
config_entry.entry_id,
config_entry.title,
voice_settings,
config_entry.options.get(
CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
),
)
]
)

@@ -98,7 +93,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
entry_id: str,
title: str,
voice_settings: VoiceSettings,
latency: int = 0,
) -> None:
"""Init ElevenLabs TTS service."""
self._client = client

@@ -115,7 +109,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
if voice_indices:
self._voices.insert(0, self._voices.pop(voice_indices[0]))
self._voice_settings = voice_settings
self._latency = latency

# Entity attributes
self._attr_unique_id = entry_id

@@ -144,14 +137,14 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
voice_id = options.get(ATTR_VOICE, self._default_voice_id)
model = options.get(ATTR_MODEL, self._model.model_id)
try:
audio = await self._client.generate(
audio = self._client.text_to_speech.convert(
text=message,
voice=voice_id,
optimize_streaming_latency=self._latency,
voice_id=voice_id,
voice_settings=self._voice_settings,
model=model,
model_id=model,
)
bytes_combined = b"".join([byte_seg async for byte_seg in audio])

except ApiError as exc:
_LOGGER.warning(
"Error during processing of TTS request %s", exc, exc_info=True
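A condensed sketch of the new call shape used in the TTS hunk above. The client object and voice_settings are the ones built in the surrounding ElevenLabs code; the voice ID is a placeholder and the model ID reuses DEFAULT_MODEL from the const.py hunk.

async def synthesize(client, voice_settings, message: str) -> bytes:
    # client: AsyncElevenLabs instance, as constructed in the config-flow hunk above.
    audio = client.text_to_speech.convert(
        text=message,
        voice_id="voice-id-placeholder",  # placeholder, not a real voice ID
        voice_settings=voice_settings,
        model_id="eleven_multilingual_v2",
    )
    # convert() yields audio chunks asynchronously; join them into one payload.
    return b"".join([chunk async for chunk in audio])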
@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==2.1.0", "bleak-esphome==2.16.0"]
"requirements": ["eq3btsmart==2.1.0", "bleak-esphome==3.1.0"]
}

@@ -100,49 +100,70 @@ class EsphomeAlarmControlPanel(
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.DISARM, code
self._key,
AlarmControlPanelCommand.DISARM,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send arm home command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.ARM_HOME, code
self._key,
AlarmControlPanelCommand.ARM_HOME,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.ARM_AWAY, code
self._key,
AlarmControlPanelCommand.ARM_AWAY,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm away command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.ARM_NIGHT, code
self._key,
AlarmControlPanelCommand.ARM_NIGHT,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None:
"""Send arm away command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.ARM_CUSTOM_BYPASS, code
self._key,
AlarmControlPanelCommand.ARM_CUSTOM_BYPASS,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_arm_vacation(self, code: str | None = None) -> None:
"""Send arm away command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.ARM_VACATION, code
self._key,
AlarmControlPanelCommand.ARM_VACATION,
code,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send alarm trigger command."""
self._client.alarm_control_panel_command(
self._key, AlarmControlPanelCommand.TRIGGER, code
self._key,
AlarmControlPanelCommand.TRIGGER,
code,
device_id=self._static_info.device_id,
)
@@ -48,7 +48,7 @@ class EsphomeButton(EsphomeEntity[ButtonInfo, EntityState], ButtonEntity):
@convert_api_error_ha_error
async def async_press(self) -> None:
"""Press the button."""
self._client.button_command(self._key)
self._client.button_command(self._key, device_id=self._static_info.device_id)


async_setup_entry = partial(

@@ -287,18 +287,24 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
data["target_temperature_low"] = kwargs[ATTR_TARGET_TEMP_LOW]
if ATTR_TARGET_TEMP_HIGH in kwargs:
data["target_temperature_high"] = kwargs[ATTR_TARGET_TEMP_HIGH]
self._client.climate_command(**data)
self._client.climate_command(**data, device_id=self._static_info.device_id)

@convert_api_error_ha_error
async def async_set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
self._client.climate_command(key=self._key, target_humidity=humidity)
self._client.climate_command(
key=self._key,
target_humidity=humidity,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target operation mode."""
self._client.climate_command(
key=self._key, mode=_CLIMATE_MODES.from_hass(hvac_mode)
key=self._key,
mode=_CLIMATE_MODES.from_hass(hvac_mode),
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error

@@ -309,7 +315,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
kwargs["custom_preset"] = preset_mode
else:
kwargs["preset"] = _PRESETS.from_hass(preset_mode)
self._client.climate_command(**kwargs)
self._client.climate_command(**kwargs, device_id=self._static_info.device_id)

@convert_api_error_ha_error
async def async_set_fan_mode(self, fan_mode: str) -> None:

@@ -319,13 +325,15 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
kwargs["custom_fan_mode"] = fan_mode
else:
kwargs["fan_mode"] = _FAN_MODES.from_hass(fan_mode)
self._client.climate_command(**kwargs)
self._client.climate_command(**kwargs, device_id=self._static_info.device_id)

@convert_api_error_ha_error
async def async_set_swing_mode(self, swing_mode: str) -> None:
"""Set new swing mode."""
self._client.climate_command(
key=self._key, swing_mode=_SWING_MODES.from_hass(swing_mode)
key=self._key,
swing_mode=_SWING_MODES.from_hass(swing_mode),
device_id=self._static_info.device_id,
)

@@ -90,38 +90,56 @@ class EsphomeCover(EsphomeEntity[CoverInfo, CoverState], CoverEntity):
@convert_api_error_ha_error
async def async_open_cover(self, **kwargs: Any) -> None:
"""Open the cover."""
self._client.cover_command(key=self._key, position=1.0)
self._client.cover_command(
key=self._key, position=1.0, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_close_cover(self, **kwargs: Any) -> None:
"""Close cover."""
self._client.cover_command(key=self._key, position=0.0)
self._client.cover_command(
key=self._key, position=0.0, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_stop_cover(self, **kwargs: Any) -> None:
"""Stop the cover."""
self._client.cover_command(key=self._key, stop=True)
self._client.cover_command(
key=self._key, stop=True, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_set_cover_position(self, **kwargs: Any) -> None:
"""Move the cover to a specific position."""
self._client.cover_command(key=self._key, position=kwargs[ATTR_POSITION] / 100)
self._client.cover_command(
key=self._key,
position=kwargs[ATTR_POSITION] / 100,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_open_cover_tilt(self, **kwargs: Any) -> None:
"""Open the cover tilt."""
self._client.cover_command(key=self._key, tilt=1.0)
self._client.cover_command(
key=self._key, tilt=1.0, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_close_cover_tilt(self, **kwargs: Any) -> None:
"""Close the cover tilt."""
self._client.cover_command(key=self._key, tilt=0.0)
self._client.cover_command(
key=self._key, tilt=0.0, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_set_cover_tilt_position(self, **kwargs: Any) -> None:
"""Move the cover tilt to a specific position."""
tilt_position: int = kwargs[ATTR_TILT_POSITION]
self._client.cover_command(key=self._key, tilt=tilt_position / 100)
self._client.cover_command(
key=self._key,
tilt=tilt_position / 100,
device_id=self._static_info.device_id,
)


async_setup_entry = partial(
@@ -28,7 +28,13 @@ class EsphomeDate(EsphomeEntity[DateInfo, DateState], DateEntity):

async def async_set_value(self, value: date) -> None:
"""Update the current date."""
self._client.date_command(self._key, value.year, value.month, value.day)
self._client.date_command(
self._key,
value.year,
value.month,
value.day,
device_id=self._static_info.device_id,
)


async_setup_entry = partial(

@@ -29,7 +29,9 @@ class EsphomeDateTime(EsphomeEntity[DateTimeInfo, DateTimeState], DateTimeEntity

async def async_set_value(self, value: datetime) -> None:
"""Update the current datetime."""
self._client.datetime_command(self._key, int(value.timestamp()))
self._client.datetime_command(
self._key, int(value.timestamp()), device_id=self._static_info.device_id
)


async_setup_entry = partial(

@@ -71,7 +71,7 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):
ORDERED_NAMED_FAN_SPEEDS, percentage
)
data["speed"] = named_speed
self._client.fan_command(**data)
self._client.fan_command(**data, device_id=self._static_info.device_id)

async def async_turn_on(
self,

@@ -85,24 +85,36 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):
@convert_api_error_ha_error
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the fan."""
self._client.fan_command(key=self._key, state=False)
self._client.fan_command(
key=self._key, state=False, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
self._client.fan_command(key=self._key, oscillating=oscillating)
self._client.fan_command(
key=self._key,
oscillating=oscillating,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_set_direction(self, direction: str) -> None:
"""Set direction of the fan."""
self._client.fan_command(
key=self._key, direction=_FAN_DIRECTIONS.from_hass(direction)
key=self._key,
direction=_FAN_DIRECTIONS.from_hass(direction),
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set the preset mode of the fan."""
self._client.fan_command(key=self._key, preset_mode=preset_mode)
self._client.fan_command(
key=self._key,
preset_mode=preset_mode,
device_id=self._static_info.device_id,
)

@property
@esphome_state_property
@@ -280,7 +280,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
# (fewest capabilities set)
data["color_mode"] = _least_complex_color_mode(color_modes)

self._client.light_command(**data)
self._client.light_command(**data, device_id=self._static_info.device_id)

@convert_api_error_ha_error
async def async_turn_off(self, **kwargs: Any) -> None:

@@ -290,7 +290,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]]
if ATTR_TRANSITION in kwargs:
data["transition_length"] = kwargs[ATTR_TRANSITION]
self._client.light_command(**data)
self._client.light_command(**data, device_id=self._static_info.device_id)

@property
@esphome_state_property

@@ -65,18 +65,24 @@ class EsphomeLock(EsphomeEntity[LockInfo, LockEntityState], LockEntity):
@convert_api_error_ha_error
async def async_lock(self, **kwargs: Any) -> None:
"""Lock the lock."""
self._client.lock_command(self._key, LockCommand.LOCK)
self._client.lock_command(
self._key, LockCommand.LOCK, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_unlock(self, **kwargs: Any) -> None:
"""Unlock the lock."""
code = kwargs.get(ATTR_CODE)
self._client.lock_command(self._key, LockCommand.UNLOCK, code)
self._client.lock_command(
self._key, LockCommand.UNLOCK, code, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_open(self, **kwargs: Any) -> None:
"""Open the door latch."""
self._client.lock_command(self._key, LockCommand.OPEN)
self._client.lock_command(
self._key, LockCommand.OPEN, device_id=self._static_info.device_id
)


async_setup_entry = partial(

@@ -17,9 +17,9 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==34.2.0",
"aioesphomeapi==35.0.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==2.16.0"
"bleak-esphome==3.1.0"
],
"zeroconf": ["_esphomelib._tcp.local."]
}
@@ -132,7 +132,10 @@ class EsphomeMediaPlayer(
media_id = proxy_url

self._client.media_player_command(
self._key, media_url=media_id, announcement=announcement
self._key,
media_url=media_id,
announcement=announcement,
device_id=self._static_info.device_id,
)

async def async_will_remove_from_hass(self) -> None:

@@ -214,22 +217,36 @@ class EsphomeMediaPlayer(
@convert_api_error_ha_error
async def async_set_volume_level(self, volume: float) -> None:
"""Set volume level, range 0..1."""
self._client.media_player_command(self._key, volume=volume)
self._client.media_player_command(
self._key, volume=volume, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_media_pause(self) -> None:
"""Send pause command."""
self._client.media_player_command(self._key, command=MediaPlayerCommand.PAUSE)
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.PAUSE,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_media_play(self) -> None:
"""Send play command."""
self._client.media_player_command(self._key, command=MediaPlayerCommand.PLAY)
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.PLAY,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_media_stop(self) -> None:
"""Send stop command."""
self._client.media_player_command(self._key, command=MediaPlayerCommand.STOP)
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.STOP,
device_id=self._static_info.device_id,
)

@convert_api_error_ha_error
async def async_mute_volume(self, mute: bool) -> None:

@@ -237,6 +254,7 @@ class EsphomeMediaPlayer(
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.MUTE if mute else MediaPlayerCommand.UNMUTE,
device_id=self._static_info.device_id,
)

@@ -67,7 +67,9 @@ class EsphomeNumber(EsphomeEntity[NumberInfo, NumberState], NumberEntity):
@convert_api_error_ha_error
async def async_set_native_value(self, value: float) -> None:
"""Update the current value."""
self._client.number_command(self._key, value)
self._client.number_command(
self._key, value, device_id=self._static_info.device_id
)


async_setup_entry = partial(

@@ -76,7 +76,9 @@ class EsphomeSelect(EsphomeEntity[SelectInfo, SelectState], SelectEntity):
@convert_api_error_ha_error
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
self._client.select_command(self._key, option)
self._client.select_command(
self._key, option, device_id=self._static_info.device_id
)


class EsphomeAssistPipelineSelect(EsphomeAssistEntity, AssistPipelineSelect):

@@ -43,12 +43,16 @@ class EsphomeSwitch(EsphomeEntity[SwitchInfo, SwitchState], SwitchEntity):
@convert_api_error_ha_error
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
self._client.switch_command(self._key, True)
self._client.switch_command(
self._key, True, device_id=self._static_info.device_id
)

@convert_api_error_ha_error
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity off."""
|
||||
self._client.switch_command(self._key, False)
|
||||
self._client.switch_command(
|
||||
self._key, False, device_id=self._static_info.device_id
|
||||
)
|
||||
|
||||
|
||||
async_setup_entry = partial(
|
||||
|
@ -50,7 +50,9 @@ class EsphomeText(EsphomeEntity[TextInfo, TextState], TextEntity):
|
||||
@convert_api_error_ha_error
|
||||
async def async_set_value(self, value: str) -> None:
|
||||
"""Update the current value."""
|
||||
self._client.text_command(self._key, value)
|
||||
self._client.text_command(
|
||||
self._key, value, device_id=self._static_info.device_id
|
||||
)
|
||||
|
||||
|
||||
async_setup_entry = partial(
|
||||
|
@ -28,7 +28,13 @@ class EsphomeTime(EsphomeEntity[TimeInfo, TimeState], TimeEntity):
|
||||
|
||||
async def async_set_value(self, value: time) -> None:
|
||||
"""Update the current time."""
|
||||
self._client.time_command(self._key, value.hour, value.minute, value.second)
|
||||
self._client.time_command(
|
||||
self._key,
|
||||
value.hour,
|
||||
value.minute,
|
||||
value.second,
|
||||
device_id=self._static_info.device_id,
|
||||
)
|
||||
|
||||
|
||||
async_setup_entry = partial(
|
||||
|
@ -334,11 +334,19 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity):
|
||||
async def async_update(self) -> None:
|
||||
"""Command device to check for update."""
|
||||
if self.available:
|
||||
self._client.update_command(key=self._key, command=UpdateCommand.CHECK)
|
||||
self._client.update_command(
|
||||
key=self._key,
|
||||
command=UpdateCommand.CHECK,
|
||||
device_id=self._static_info.device_id,
|
||||
)
|
||||
|
||||
@convert_api_error_ha_error
|
||||
async def async_install(
|
||||
self, version: str | None, backup: bool, **kwargs: Any
|
||||
) -> None:
|
||||
"""Command device to install update."""
|
||||
self._client.update_command(key=self._key, command=UpdateCommand.INSTALL)
|
||||
self._client.update_command(
|
||||
key=self._key,
|
||||
command=UpdateCommand.INSTALL,
|
||||
device_id=self._static_info.device_id,
|
||||
)
|
||||
|
@ -72,22 +72,32 @@ class EsphomeValve(EsphomeEntity[ValveInfo, ValveState], ValveEntity):
|
||||
@convert_api_error_ha_error
|
||||
async def async_open_valve(self, **kwargs: Any) -> None:
|
||||
"""Open the valve."""
|
||||
self._client.valve_command(key=self._key, position=1.0)
|
||||
self._client.valve_command(
|
||||
key=self._key, position=1.0, device_id=self._static_info.device_id
|
||||
)
|
||||
|
||||
@convert_api_error_ha_error
|
||||
async def async_close_valve(self, **kwargs: Any) -> None:
|
||||
"""Close valve."""
|
||||
self._client.valve_command(key=self._key, position=0.0)
|
||||
self._client.valve_command(
|
||||
key=self._key, position=0.0, device_id=self._static_info.device_id
|
||||
)
|
||||
|
||||
@convert_api_error_ha_error
|
||||
async def async_stop_valve(self, **kwargs: Any) -> None:
|
||||
"""Stop the valve."""
|
||||
self._client.valve_command(key=self._key, stop=True)
|
||||
self._client.valve_command(
|
||||
key=self._key, stop=True, device_id=self._static_info.device_id
|
||||
)
|
||||
|
||||
@convert_api_error_ha_error
|
||||
async def async_set_valve_position(self, position: float) -> None:
|
||||
"""Move the valve to a specific position."""
|
||||
self._client.valve_command(key=self._key, position=position / 100)
|
||||
self._client.valve_command(
|
||||
key=self._key,
|
||||
position=position / 100,
|
||||
device_id=self._static_info.device_id,
|
||||
)
|
||||
|
||||
|
||||
async_setup_entry = partial(
|
||||
|
@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250702.1"]
|
||||
"requirements": ["home-assistant-frontend==20250702.2"]
|
||||
}
|
||||
|
@ -1,5 +1,7 @@
"""The generic_hygrostat component."""

import logging

import voluptuous as vol

from homeassistant.components.humidifier import HumidifierDeviceClass

@ -16,7 +18,10 @@ from homeassistant.helpers.device import (
async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.event import async_track_entity_registry_updated_event
from homeassistant.helpers.helper_integration import async_handle_source_entity_changes
from homeassistant.helpers.helper_integration import (
async_handle_source_entity_changes,
async_remove_helper_config_entry_from_source_device,
)
from homeassistant.helpers.typing import ConfigType

DOMAIN = "generic_hygrostat"

@ -70,6 +75,8 @@ CONFIG_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)

_LOGGER = logging.getLogger(__name__)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Generic Hygrostat component."""

@ -89,6 +96,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up from a config entry."""

# This can be removed in HA Core 2026.2
async_remove_stale_devices_links_keep_entity_device(
hass,
entry.entry_id,

@ -101,23 +109,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
)

async def source_entity_removed() -> None:
# The source entity has been removed, we need to clean the device links.
async_remove_stale_devices_links_keep_entity_device(hass, entry.entry_id, None)

entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidifer,
# but not the humidity sensor because the generic_hygrostat adds itself to the
# humidifier's device.
async_handle_source_entity_changes(
hass,
add_helper_config_entry_to_device=False,
helper_config_entry_id=entry.entry_id,
set_source_entity_id_or_uuid=set_humidifier_entity_id_or_uuid,
source_device_id=async_entity_id_to_device_id(
hass, entry.options[CONF_HUMIDIFIER]
),
source_entity_id_or_uuid=entry.options[CONF_HUMIDIFIER],
source_entity_removed=source_entity_removed,
)
)

@ -148,6 +152,40 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
_LOGGER.debug(
"Migrating from version %s.%s", config_entry.version, config_entry.minor_version
)

if config_entry.version > 1:
# This means the user has downgraded from a future version
return False
if config_entry.version == 1:
options = {**config_entry.options}
if config_entry.minor_version < 2:
# Remove the generic_hygrostat config entry from the source device
if source_device_id := async_entity_id_to_device_id(
hass, options[CONF_HUMIDIFIER]
):
async_remove_helper_config_entry_from_source_device(
hass,
helper_config_entry_id=config_entry.entry_id,
source_device_id=source_device_id,
)
hass.config_entries.async_update_entry(
config_entry, options=options, minor_version=2
)

_LOGGER.debug(
"Migration to version %s.%s successful",
config_entry.version,
config_entry.minor_version,
)

return True


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)

@ -92,6 +92,8 @@ OPTIONS_FLOW = {
class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
"""Handle a config or options flow."""

MINOR_VERSION = 2

config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW

@ -42,7 +42,7 @@ from homeassistant.core import (
callback,
)
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device import async_entity_id_to_device
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,

@ -145,22 +145,22 @@ async def _async_setup_config(
[
GenericHygrostat(
hass,
name,
switch_entity_id,
sensor_entity_id,
min_humidity,
max_humidity,
target_humidity,
device_class,
min_cycle_duration,
dry_tolerance,
wet_tolerance,
keep_alive,
initial_state,
away_humidity,
away_fixed,
sensor_stale_duration,
unique_id,
name=name,
switch_entity_id=switch_entity_id,
sensor_entity_id=sensor_entity_id,
min_humidity=min_humidity,
max_humidity=max_humidity,
target_humidity=target_humidity,
device_class=device_class,
min_cycle_duration=min_cycle_duration,
dry_tolerance=dry_tolerance,
wet_tolerance=wet_tolerance,
keep_alive=keep_alive,
initial_state=initial_state,
away_humidity=away_humidity,
away_fixed=away_fixed,
sensor_stale_duration=sensor_stale_duration,
unique_id=unique_id,
)
]
)

@ -174,6 +174,7 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity):
def __init__(
self,
hass: HomeAssistant,
*,
name: str,
switch_entity_id: str,
sensor_entity_id: str,

@ -195,7 +196,7 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity):
self._name = name
self._switch_entity_id = switch_entity_id
self._sensor_entity_id = sensor_entity_id
self._attr_device_info = async_device_info_to_link_from_entity(
self.device_entry = async_entity_id_to_device(
hass,
switch_entity_id,
)
@ -1,5 +1,7 @@
"""The generic_thermostat component."""

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers import entity_registry as er

@ -8,14 +10,20 @@ from homeassistant.helpers.device import (
async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.event import async_track_entity_registry_updated_event
from homeassistant.helpers.helper_integration import async_handle_source_entity_changes
from homeassistant.helpers.helper_integration import (
async_handle_source_entity_changes,
async_remove_helper_config_entry_from_source_device,
)

from .const import CONF_HEATER, CONF_SENSOR, PLATFORMS

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up from a config entry."""

# This can be removed in HA Core 2026.2
async_remove_stale_devices_links_keep_entity_device(
hass,
entry.entry_id,

@ -28,23 +36,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
options={**entry.options, CONF_HEATER: source_entity_id},
)

async def source_entity_removed() -> None:
# The source entity has been removed, we need to clean the device links.
async_remove_stale_devices_links_keep_entity_device(hass, entry.entry_id, None)

entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the heater, but
# not the temperature sensor because the generic_hygrostat adds itself to the
# heater's device.
async_handle_source_entity_changes(
hass,
add_helper_config_entry_to_device=False,
helper_config_entry_id=entry.entry_id,
set_source_entity_id_or_uuid=set_humidifier_entity_id_or_uuid,
source_device_id=async_entity_id_to_device_id(
hass, entry.options[CONF_HEATER]
),
source_entity_id_or_uuid=entry.options[CONF_HEATER],
source_entity_removed=source_entity_removed,
)
)

@ -75,6 +79,40 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
_LOGGER.debug(
"Migrating from version %s.%s", config_entry.version, config_entry.minor_version
)

if config_entry.version > 1:
# This means the user has downgraded from a future version
return False
if config_entry.version == 1:
options = {**config_entry.options}
if config_entry.minor_version < 2:
# Remove the generic_thermostat config entry from the source device
if source_device_id := async_entity_id_to_device_id(
hass, options[CONF_HEATER]
):
async_remove_helper_config_entry_from_source_device(
hass,
helper_config_entry_id=config_entry.entry_id,
source_device_id=source_device_id,
)
hass.config_entries.async_update_entry(
config_entry, options=options, minor_version=2
)

_LOGGER.debug(
"Migration to version %s.%s successful",
config_entry.version,
config_entry.minor_version,
)

return True


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)

@ -48,7 +48,7 @@ from homeassistant.core import (
)
from homeassistant.exceptions import ConditionError
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device import async_entity_id_to_device
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,

@ -182,23 +182,23 @@ async def _async_setup_config(
[
GenericThermostat(
hass,
name,
heater_entity_id,
sensor_entity_id,
min_temp,
max_temp,
target_temp,
ac_mode,
min_cycle_duration,
cold_tolerance,
hot_tolerance,
keep_alive,
initial_hvac_mode,
presets,
precision,
target_temperature_step,
unit,
unique_id,
name=name,
heater_entity_id=heater_entity_id,
sensor_entity_id=sensor_entity_id,
min_temp=min_temp,
max_temp=max_temp,
target_temp=target_temp,
ac_mode=ac_mode,
min_cycle_duration=min_cycle_duration,
cold_tolerance=cold_tolerance,
hot_tolerance=hot_tolerance,
keep_alive=keep_alive,
initial_hvac_mode=initial_hvac_mode,
presets=presets,
precision=precision,
target_temperature_step=target_temperature_step,
unit=unit,
unique_id=unique_id,
)
]
)

@ -212,6 +212,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity):
def __init__(
self,
hass: HomeAssistant,
*,
name: str,
heater_entity_id: str,
sensor_entity_id: str,

@ -234,7 +235,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity):
self._attr_name = name
self.heater_entity_id = heater_entity_id
self.sensor_entity_id = sensor_entity_id
self._attr_device_info = async_device_info_to_link_from_entity(
self.device_entry = async_entity_id_to_device(
hass,
heater_entity_id,
)

@ -100,6 +100,8 @@ OPTIONS_FLOW = {
class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
"""Handle a config or options flow."""

MINOR_VERSION = 2

config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
@ -127,7 +127,7 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity):
try:
responses = await self._client.streaming_recognize(
requests=request_generator(),
timeout=10,
timeout=30,
retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0),
)

@ -218,7 +218,7 @@ class BaseGoogleCloudProvider:

response = await self._client.synthesize_speech(
request,
timeout=10,
timeout=30,
retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0),
)

@ -195,11 +195,15 @@ async def async_update_options(
async def async_migrate_integration(hass: HomeAssistant) -> None:
"""Migrate integration entry structure."""

entries = hass.config_entries.async_entries(DOMAIN)
# Make sure we get enabled config entries first
entries = sorted(
hass.config_entries.async_entries(DOMAIN),
key=lambda e: e.disabled_by is not None,
)
if not any(entry.version == 1 for entry in entries):
return

api_keys_entries: dict[str, ConfigEntry] = {}
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)

@ -213,9 +217,14 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
)
if entry.data[CONF_API_KEY] not in api_keys_entries:
use_existing = True
api_keys_entries[entry.data[CONF_API_KEY]] = entry
all_disabled = all(
e.disabled_by is not None
for e in entries
if e.data[CONF_API_KEY] == entry.data[CONF_API_KEY]
)
api_keys_entries[entry.data[CONF_API_KEY]] = (entry, all_disabled)

parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
parent_entry, all_disabled = api_keys_entries[entry.data[CONF_API_KEY]]

hass.config_entries.async_add_subentry(parent_entry, subentry)
if use_existing:

@ -228,25 +237,51 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
unique_id=None,
),
)
conversation_entity = entity_registry.async_get_entity_id(
conversation_entity_id = entity_registry.async_get_entity_id(
"conversation",
DOMAIN,
entry.entry_id,
)
if conversation_entity is not None:
entity_registry.async_update_entity(
conversation_entity,
config_entry_id=parent_entry.entry_id,
config_subentry_id=subentry.subentry_id,
new_unique_id=subentry.subentry_id,
)

device = device_registry.async_get_device(
identifiers={(DOMAIN, entry.entry_id)}
)

if conversation_entity_id is not None:
conversation_entity_entry = entity_registry.entities[conversation_entity_id]
entity_disabled_by = conversation_entity_entry.disabled_by
if (
entity_disabled_by is er.RegistryEntryDisabler.CONFIG_ENTRY
and not all_disabled
):
# Device and entity registries don't update the disabled_by flag
# when moving a device or entity from one config entry to another,
# so we need to do it manually.
entity_disabled_by = (
er.RegistryEntryDisabler.DEVICE
if device
else er.RegistryEntryDisabler.USER
)
entity_registry.async_update_entity(
conversation_entity_id,
config_entry_id=parent_entry.entry_id,
config_subentry_id=subentry.subentry_id,
disabled_by=entity_disabled_by,
new_unique_id=subentry.subentry_id,
)

if device is not None:
# Device and entity registries don't update the disabled_by flag when
# moving a device or entity from one config entry to another, so we
# need to do it manually.
device_disabled_by = device.disabled_by
if (
device.disabled_by is dr.DeviceEntryDisabler.CONFIG_ENTRY
and not all_disabled
):
device_disabled_by = dr.DeviceEntryDisabler.USER
device_registry.async_update_device(
device.id,
disabled_by=device_disabled_by,
new_identifiers={(DOMAIN, subentry.subentry_id)},
add_config_subentry_id=subentry.subentry_id,
add_config_entry_id=parent_entry.entry_id,

@ -266,12 +301,13 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
if not use_existing:
await hass.config_entries.async_remove(entry.entry_id)
else:
_add_ai_task_subentry(hass, entry)
hass.config_entries.async_update_entry(
entry,
title=DEFAULT_TITLE,
options={},
version=2,
minor_version=2,
minor_version=4,
)

@ -315,19 +351,58 @@ async def async_migrate_entry(

if entry.version == 2 and entry.minor_version == 2:
# Add AI Task subentry with default options
hass.config_entries.async_add_subentry(
entry,
ConfigSubentry(
data=MappingProxyType(RECOMMENDED_AI_TASK_OPTIONS),
subentry_type="ai_task_data",
title=DEFAULT_AI_TASK_NAME,
unique_id=None,
),
)
_add_ai_task_subentry(hass, entry)
hass.config_entries.async_update_entry(entry, minor_version=3)

if entry.version == 2 and entry.minor_version == 3:
# Fix migration where the disabled_by flag was not set correctly.
# We can currently only correct this for enabled config entries,
# because migration does not run for disabled config entries. This
# is asserted in tests, and if that behavior is changed, we should
# correct also disabled config entries.
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
devices = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
entity_entries = er.async_entries_for_config_entry(
entity_registry, entry.entry_id
)
if entry.disabled_by is None:
# If the config entry is not disabled, we need to set the disabled_by
# flag on devices to USER, and on entities to DEVICE, if they are set
# to CONFIG_ENTRY.
for device in devices:
if device.disabled_by is not dr.DeviceEntryDisabler.CONFIG_ENTRY:
continue
device_registry.async_update_device(
device.id,
disabled_by=dr.DeviceEntryDisabler.USER,
)
for entity in entity_entries:
if entity.disabled_by is not er.RegistryEntryDisabler.CONFIG_ENTRY:
continue
entity_registry.async_update_entity(
entity.entity_id,
disabled_by=er.RegistryEntryDisabler.DEVICE,
)
hass.config_entries.async_update_entry(entry, minor_version=4)

LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)

return True


def _add_ai_task_subentry(
hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
) -> None:
"""Add AI Task subentry to the config entry."""
hass.config_entries.async_add_subentry(
entry,
ConfigSubentry(
data=MappingProxyType(RECOMMENDED_AI_TASK_OPTIONS),
subentry_type="ai_task_data",
title=DEFAULT_AI_TASK_NAME,
unique_id=None,
),
)

@ -37,7 +37,10 @@ class GoogleGenerativeAITaskEntity(
):
"""Google Generative AI AI Task entity."""

_attr_supported_features = ai_task.AITaskEntityFeature.GENERATE_DATA
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)

async def _async_generate_data(
self,

@ -97,7 +97,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Google Generative AI Conversation."""

VERSION = 2
MINOR_VERSION = 3
MINOR_VERSION = 4

async def async_step_api(
self, user_input: dict[str, Any] | None = None

@ -25,7 +25,7 @@ RECOMMENDED_TOP_P = 0.95
CONF_TOP_K = "top_k"
RECOMMENDED_TOP_K = 64
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1500
RECOMMENDED_MAX_TOKENS = 3000
CONF_HARASSMENT_BLOCK_THRESHOLD = "harassment_block_threshold"
CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold"
CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold"
@ -8,7 +8,7 @@ from collections.abc import AsyncGenerator, Callable
from dataclasses import replace
import mimetypes
from pathlib import Path
from typing import Any, cast
from typing import TYPE_CHECKING, Any, cast

from google.genai import Client
from google.genai.errors import APIError, ClientError

@ -31,7 +31,7 @@ import voluptuous as vol
from voluptuous_openapi import convert

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm

@ -60,6 +60,9 @@ from .const import (
TIMEOUT_MILLIS,
)

if TYPE_CHECKING:
from . import GoogleGenerativeAIConfigEntry

# Max number of back and forth with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10

@ -313,7 +316,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):

def __init__(
self,
entry: ConfigEntry,
entry: GoogleGenerativeAIConfigEntry,
subentry: ConfigSubentry,
default_model: str = RECOMMENDED_CHAT_MODEL,
) -> None:

@ -438,6 +441,14 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
user_message = chat_log.content[-1]
assert isinstance(user_message, conversation.UserContent)
chat_request: str | list[Part] = user_message.content
if user_message.attachments:
files = await async_prepare_files_for_prompt(
self.hass,
self._genai_client,
[a.path for a in user_message.attachments],
)
chat_request = [chat_request, *files]

# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:

@ -508,7 +519,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
async def async_prepare_files_for_prompt(
hass: HomeAssistant, client: Client, files: list[Path]
) -> list[File]:
"""Append files to a prompt.
"""Upload files so they can be attached to a prompt.

Caller needs to ensure that the files are allowed.
"""
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN, ENTRY_TITLE
|
||||
from .coordinator import HeosConfigEntry
|
||||
@ -142,51 +143,16 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
if TYPE_CHECKING:
|
||||
assert discovery_info.ssdp_location
|
||||
|
||||
entry: HeosConfigEntry | None = await self.async_set_unique_id(DOMAIN)
|
||||
hostname = urlparse(discovery_info.ssdp_location).hostname
|
||||
assert hostname is not None
|
||||
|
||||
# Abort early when discovery is ignored or host is part of the current system
|
||||
if entry and (
|
||||
entry.source == SOURCE_IGNORE or hostname in _get_current_hosts(entry)
|
||||
):
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
return await self._async_handle_discovered(hostname)
|
||||
|
||||
# Connect to discovered host and get system information
|
||||
heos = Heos(HeosOptions(hostname, events=False, heart_beat=False))
|
||||
try:
|
||||
await heos.connect()
|
||||
system_info = await heos.get_system_info()
|
||||
except HeosError as error:
|
||||
_LOGGER.debug(
|
||||
"Failed to retrieve system information from discovered HEOS device %s",
|
||||
hostname,
|
||||
exc_info=error,
|
||||
)
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
finally:
|
||||
await heos.disconnect()
|
||||
|
||||
# Select the preferred host, if available
|
||||
if system_info.preferred_hosts:
|
||||
hostname = system_info.preferred_hosts[0].ip_address
|
||||
|
||||
# Move to confirmation when not configured
|
||||
if entry is None:
|
||||
self._discovered_host = hostname
|
||||
return await self.async_step_confirm_discovery()
|
||||
|
||||
# Only update if the configured host isn't part of the discovered hosts to ensure new players that come online don't trigger a reload
|
||||
if entry.data[CONF_HOST] not in [host.ip_address for host in system_info.hosts]:
|
||||
_LOGGER.debug(
|
||||
"Updated host %s to discovered host %s", entry.data[CONF_HOST], hostname
|
||||
)
|
||||
return self.async_update_reload_and_abort(
|
||||
entry,
|
||||
data_updates={CONF_HOST: hostname},
|
||||
reason="reconfigure_successful",
|
||||
)
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
return await self._async_handle_discovered(discovery_info.host)
|
||||
|
||||
async def async_step_confirm_discovery(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@ -267,6 +233,50 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
)
|
||||
|
||||
async def _async_handle_discovered(self, hostname: str) -> ConfigFlowResult:
|
||||
entry: HeosConfigEntry | None = await self.async_set_unique_id(DOMAIN)
|
||||
# Abort early when discovery is ignored or host is part of the current system
|
||||
if entry and (
|
||||
entry.source == SOURCE_IGNORE or hostname in _get_current_hosts(entry)
|
||||
):
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
# Connect to discovered host and get system information
|
||||
heos = Heos(HeosOptions(hostname, events=False, heart_beat=False))
|
||||
try:
|
||||
await heos.connect()
|
||||
system_info = await heos.get_system_info()
|
||||
except HeosError as error:
|
||||
_LOGGER.debug(
|
||||
"Failed to retrieve system information from discovered HEOS device %s",
|
||||
hostname,
|
||||
exc_info=error,
|
||||
)
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
finally:
|
||||
await heos.disconnect()
|
||||
|
||||
# Select the preferred host, if available
|
||||
if system_info.preferred_hosts and system_info.preferred_hosts[0].ip_address:
|
||||
hostname = system_info.preferred_hosts[0].ip_address
|
||||
|
||||
# Move to confirmation when not configured
|
||||
if entry is None:
|
||||
self._discovered_host = hostname
|
||||
return await self.async_step_confirm_discovery()
|
||||
|
||||
# Only update if the configured host isn't part of the discovered hosts to ensure new players that come online don't trigger a reload
|
||||
if entry.data[CONF_HOST] not in [host.ip_address for host in system_info.hosts]:
|
||||
_LOGGER.debug(
|
||||
"Updated host %s to discovered host %s", entry.data[CONF_HOST], hostname
|
||||
)
|
||||
return self.async_update_reload_and_abort(
|
||||
entry,
|
||||
data_updates={CONF_HOST: hostname},
|
||||
reason="reconfigure_successful",
|
||||
)
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
|
||||
class HeosOptionsFlowHandler(OptionsFlow):
|
||||
"""Define HEOS options flow."""
|
||||
|
@ -13,5 +13,6 @@
|
||||
{
|
||||
"st": "urn:schemas-denon-com:device:ACT-Denon:1"
|
||||
}
|
||||
]
|
||||
],
|
||||
"zeroconf": ["_heos-audio._tcp.local."]
|
||||
}
|
||||
|
@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_STATE
|
||||
@ -11,7 +12,10 @@ from homeassistant.helpers.device import (
|
||||
async_entity_id_to_device_id,
|
||||
async_remove_stale_devices_links_keep_entity_device,
|
||||
)
|
||||
from homeassistant.helpers.helper_integration import async_handle_source_entity_changes
|
||||
from homeassistant.helpers.helper_integration import (
|
||||
async_handle_source_entity_changes,
|
||||
async_remove_helper_config_entry_from_source_device,
|
||||
)
|
||||
from homeassistant.helpers.template import Template
|
||||
|
||||
from .const import CONF_DURATION, CONF_END, CONF_START, PLATFORMS
|
||||
@ -20,6 +24,8 @@ from .data import HistoryStats
|
||||
|
||||
type HistoryStatsConfigEntry = ConfigEntry[HistoryStatsUpdateCoordinator]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: HistoryStatsConfigEntry
|
||||
@ -47,6 +53,7 @@ async def async_setup_entry(
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
# This can be removed in HA Core 2026.2
|
||||
async_remove_stale_devices_links_keep_entity_device(
|
||||
hass,
|
||||
entry.entry_id,
|
||||
@ -67,6 +74,7 @@ async def async_setup_entry(
|
||||
entry.async_on_unload(
|
||||
async_handle_source_entity_changes(
|
||||
hass,
|
||||
add_helper_config_entry_to_device=False,
|
||||
helper_config_entry_id=entry.entry_id,
|
||||
set_source_entity_id_or_uuid=set_source_entity_id_or_uuid,
|
||||
source_device_id=async_entity_id_to_device_id(
|
||||
@ -83,6 +91,40 @@ async def async_setup_entry(
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
_LOGGER.debug(
|
||||
"Migrating from version %s.%s", config_entry.version, config_entry.minor_version
|
||||
)
|
||||
|
||||
if config_entry.version > 1:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
if config_entry.version == 1:
|
||||
options = {**config_entry.options}
|
||||
if config_entry.minor_version < 2:
|
||||
# Remove the history_stats config entry from the source device
|
||||
if source_device_id := async_entity_id_to_device_id(
|
||||
hass, options[CONF_ENTITY_ID]
|
||||
):
|
||||
async_remove_helper_config_entry_from_source_device(
|
||||
hass,
|
||||
helper_config_entry_id=config_entry.entry_id,
|
||||
source_device_id=source_device_id,
|
||||
)
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry, options=options, minor_version=2
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to version %s.%s successful",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: HistoryStatsConfigEntry
|
||||
) -> bool:
|
||||
|
@ -124,6 +124,8 @@ OPTIONS_FLOW = {
|
||||
class HistoryStatsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
"""Handle a config flow for History stats."""
|
||||
|
||||
MINOR_VERSION = 2
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
|
||||
@ -229,7 +231,12 @@ async def ws_start_preview(
|
||||
coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name, True)
|
||||
await coordinator.async_refresh()
|
||||
preview_entity = HistoryStatsSensor(
|
||||
hass, coordinator, sensor_type, name, None, entity_id
|
||||
hass,
|
||||
coordinator=coordinator,
|
||||
sensor_type=sensor_type,
|
||||
name=name,
|
||||
unique_id=None,
|
||||
source_entity_id=entity_id,
|
||||
)
|
||||
preview_entity.hass = hass
|
||||
|
||||
|
@ -27,7 +27,7 @@ from homeassistant.const import (
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_entity
|
||||
from homeassistant.helpers.device import async_entity_id_to_device
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -113,7 +113,16 @@ async def async_setup_platform(
|
||||
if not coordinator.last_update_success:
|
||||
raise PlatformNotReady from coordinator.last_exception
|
||||
async_add_entities(
|
||||
[HistoryStatsSensor(hass, coordinator, sensor_type, name, unique_id, entity_id)]
|
||||
[
|
||||
HistoryStatsSensor(
|
||||
hass,
|
||||
coordinator=coordinator,
|
||||
sensor_type=sensor_type,
|
||||
name=name,
|
||||
unique_id=unique_id,
|
||||
source_entity_id=entity_id,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@ -130,7 +139,12 @@ async def async_setup_entry(
|
||||
async_add_entities(
|
||||
[
|
||||
HistoryStatsSensor(
|
||||
hass, coordinator, sensor_type, entry.title, entry.entry_id, entity_id
|
||||
hass,
|
||||
coordinator=coordinator,
|
||||
sensor_type=sensor_type,
|
||||
name=entry.title,
|
||||
unique_id=entry.entry_id,
|
||||
source_entity_id=entity_id,
|
||||
)
|
||||
]
|
||||
)
|
||||
@ -176,6 +190,7 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
coordinator: HistoryStatsUpdateCoordinator,
|
||||
sensor_type: str,
|
||||
name: str,
|
||||
@ -190,10 +205,11 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
self._attr_native_unit_of_measurement = UNITS[sensor_type]
|
||||
self._type = sensor_type
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_device_info = async_device_info_to_link_from_entity(
|
||||
hass,
|
||||
source_entity_id,
|
||||
)
|
||||
if source_entity_id: # Guard against empty source_entity_id in preview mode
|
||||
self.device_entry = async_entity_id_to_device(
|
||||
hass,
|
||||
source_entity_id,
|
||||
)
|
||||
self._process_update()
|
||||
if self._type == CONF_TYPE_TIME:
|
||||
self._attr_device_class = SensorDeviceClass.DURATION
|
||||
|
@ -38,7 +38,7 @@ from propcache.api import cached_property
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import (

@ -626,39 +626,37 @@ class HomeConnectCoordinator(
"""Check if the appliance data hasn't been refreshed too often recently."""

now = self.hass.loop.time()
if len(self._execution_tracker[appliance_ha_id]) >= MAX_EXECUTIONS:
return True

execution_tracker = self._execution_tracker[appliance_ha_id]
initial_len = len(execution_tracker)

execution_tracker = self._execution_tracker[appliance_ha_id] = [
timestamp
for timestamp in self._execution_tracker[appliance_ha_id]
for timestamp in execution_tracker
if now - timestamp < MAX_EXECUTIONS_TIME_WINDOW
]

execution_tracker.append(now)

if len(execution_tracker) >= MAX_EXECUTIONS:
ir.async_create_issue(
self.hass,
DOMAIN,
f"home_connect_too_many_connected_paired_events_{appliance_ha_id}",
is_fixable=True,
is_persistent=True,
severity=ir.IssueSeverity.ERROR,
translation_key="home_connect_too_many_connected_paired_events",
data={
"entry_id": self.config_entry.entry_id,
"appliance_ha_id": appliance_ha_id,
},
translation_placeholders={
"appliance_name": self.data[appliance_ha_id].info.name,
"times": str(MAX_EXECUTIONS),
"time_window": str(MAX_EXECUTIONS_TIME_WINDOW // 60),
"home_connect_resource_url": "https://www.home-connect.com/global/help-support/error-codes#/Togglebox=15362315-13320636-1/",
"home_assistant_core_issue_url": "https://github.com/home-assistant/core/issues/147299",
},
)
if initial_len < MAX_EXECUTIONS:
_LOGGER.warning(
'Too many connected/paired events for appliance "%s" '
"(%s times in less than %s minutes), updates have been disabled "
"and they will be enabled again whenever the connection stabilizes. "
"Consider trying to unplug the appliance "
"for a while to perform a soft reset",
self.data[appliance_ha_id].info.name,
MAX_EXECUTIONS,
MAX_EXECUTIONS_TIME_WINDOW // 60,
)
return True
if initial_len >= MAX_EXECUTIONS:
_LOGGER.info(
'Connected/paired events from the appliance "%s" have stabilized,'
" updates have been re-enabled",
self.data[appliance_ha_id].info.name,
)

return False

@ -124,17 +124,6 @@
}
},
"issues": {
"home_connect_too_many_connected_paired_events": {
"title": "{appliance_name} sent too many connected or paired events",
"fix_flow": {
"step": {
"confirm": {
"title": "[%key:component::home_connect::issues::home_connect_too_many_connected_paired_events::title%]",
"description": "The appliance \"{appliance_name}\" has been reported as connected or paired {times} times in less than {time_window} minutes, so refreshes on connected or paired events has been disabled to avoid exceeding the API rate limit.\n\nPlease refer to the [Home Connect Wi-Fi requirements and recommendations]({home_connect_resource_url}). If everything seems right with your network configuration, restart the appliance.\n\nClick \"submit\" to re-enable the updates.\nIf the issue persists, please see the following issue in the [Home Assistant core repository]({home_assistant_core_issue_url})."
}
}
}
},
"deprecated_time_alarm_clock_in_automations_scripts": {
"title": "Deprecated alarm clock entity detected in some automations or scripts",
"fix_flow": {
@ -113,9 +113,7 @@ class HomematicipHAP:

self._ws_close_requested = False
self._ws_connection_closed = asyncio.Event()
self._retry_task: asyncio.Task | None = None
self._tries = 0
self._accesspoint_connected = True
self._get_state_task: asyncio.Task | None = None
self.hmip_device_by_entity_id: dict[str, Any] = {}
self.reset_connection_listener: Callable | None = None

@ -161,17 +159,8 @@ class HomematicipHAP:
"""
if not self.home.connected:
_LOGGER.error("HMIP access point has lost connection with the cloud")
self._accesspoint_connected = False
self._ws_connection_closed.set()
self.set_all_to_unavailable()
elif not self._accesspoint_connected:
# Now the HOME_CHANGED event has fired indicating the access
# point has reconnected to the cloud again.
# Explicitly getting an update as entity states might have
# changed during access point disconnect."""

job = self.hass.async_create_task(self.get_state())
job.add_done_callback(self.get_state_finished)
self._accesspoint_connected = True

@callback
def async_create_entity(self, *args, **kwargs) -> None:

@ -185,20 +174,43 @@ class HomematicipHAP:
await asyncio.sleep(30)
await self.hass.config_entries.async_reload(self.config_entry.entry_id)

async def _try_get_state(self) -> None:
"""Call get_state in a loop until no error occurs, using exponential backoff on error."""

# Wait until WebSocket connection is established.
while not self.home.websocket_is_connected():
await asyncio.sleep(2)

delay = 8
max_delay = 1500
while True:
try:
await self.get_state()
break
except HmipConnectionError as err:
_LOGGER.warning(
"Get_state failed, retrying in %s seconds: %s", delay, err
)
await asyncio.sleep(delay)
delay = min(delay * 2, max_delay)

async def get_state(self) -> None:
"""Update HMIP state and tell Home Assistant."""
await self.home.get_current_state_async()
self.update_all()

def get_state_finished(self, future) -> None:
"""Execute when get_state coroutine has finished."""
"""Execute when try_get_state coroutine has finished."""
try:
future.result()
except HmipConnectionError:
# Somehow connection could not recover. Will disconnect and
# so reconnect loop is taking over.
_LOGGER.error("Updating state after HMIP access point reconnect failed")
self.hass.async_create_task(self.home.disable_events())
except Exception as err: # noqa: BLE001
_LOGGER.error(
"Error updating state after HMIP access point reconnect: %s", err
)
else:
_LOGGER.info(
"Updating state after HMIP access point reconnect finished successfully",
)

def set_all_to_unavailable(self) -> None:
"""Set all devices to unavailable and tell Home Assistant."""

@ -222,8 +234,8 @@ class HomematicipHAP:
async def async_reset(self) -> bool:
"""Close the websocket connection."""
self._ws_close_requested = True
if self._retry_task is not None:
self._retry_task.cancel()
if self._get_state_task is not None:
self._get_state_task.cancel()
await self.home.disable_events_async()
_LOGGER.debug("Closed connection to HomematicIP cloud server")
await self.hass.config_entries.async_unload_platforms(

@ -247,7 +259,9 @@ class HomematicipHAP:
"""Handle websocket connected."""
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
if self._ws_connection_closed.is_set():
await self.get_state()
self._get_state_task = self.hass.async_create_task(self._try_get_state())
self._get_state_task.add_done_callback(self.get_state_finished)

self._ws_connection_closed.clear()

async def ws_disconnected_handler(self) -> None:

@ -256,11 +270,12 @@ class HomematicipHAP:
self._ws_connection_closed.set()

async def ws_reconnected_handler(self, reason: str) -> None:
"""Handle websocket reconnection."""
"""Handle websocket reconnection. Is called when Websocket tries to reconnect."""
_LOGGER.info(
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
"Websocket connection to HomematicIP Cloud trying to reconnect due to reason: %s",
reason,
)

self._ws_connection_closed.set()

async def get_hap(
@ -2,13 +2,20 @@

from __future__ import annotations

import logging
from typing import Any

from homematicip.base.enums import DeviceType, OpticalSignalBehaviour, RGBColorState
from homematicip.base.enums import (
DeviceType,
FunctionalChannelType,
OpticalSignalBehaviour,
RGBColorState,
)
from homematicip.base.functionalChannels import NotificationLightChannel
from homematicip.device import (
BrandDimmer,
BrandSwitchNotificationLight,
Device,
Dimmer,
DinRailDimmer3,
FullFlushDimmer,

@ -34,6 +41,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .entity import HomematicipGenericEntity
from .hap import HomematicIPConfigEntry, HomematicipHAP

_logger = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,

@ -43,6 +52,14 @@ async def async_setup_entry(
"""Set up the HomematicIP Cloud lights from a config entry."""
hap = config_entry.runtime_data
entities: list[HomematicipGenericEntity] = []

entities.extend(
HomematicipLightHS(hap, d, ch.index)
for d in hap.home.devices
for ch in d.functionalChannels
if ch.functionalChannelType == FunctionalChannelType.UNIVERSAL_LIGHT_CHANNEL
)

for device in hap.home.devices:
if (
isinstance(device, SwitchMeasuring)

@ -104,6 +121,64 @@ class HomematicipLight(HomematicipGenericEntity, LightEntity):
await self._device.turn_off_async()

class HomematicipLightHS(HomematicipGenericEntity, LightEntity):
"""Representation of the HomematicIP light with HS color mode."""

_attr_color_mode = ColorMode.HS
_attr_supported_color_modes = {ColorMode.HS}

def __init__(self, hap: HomematicipHAP, device: Device, channel_index: int) -> None:
"""Initialize the light entity."""
super().__init__(hap, device, channel=channel_index, is_multi_channel=True)

@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self.functional_channel.on

@property
def brightness(self) -> int | None:
"""Return the current brightness."""
return int(self.functional_channel.dimLevel * 255.0)

@property
def hs_color(self) -> tuple[float, float] | None:
"""Return the hue and saturation color value [float, float]."""
if (
self.functional_channel.hue is None
or self.functional_channel.saturationLevel is None
):
return None
return (
self.functional_channel.hue,
self.functional_channel.saturationLevel * 100.0,
)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the light on."""

hs_color = kwargs.get(ATTR_HS_COLOR, (0.0, 0.0))
hue = hs_color[0] % 360.0
saturation = hs_color[1] / 100.0
dim_level = round(kwargs.get(ATTR_BRIGHTNESS, 255) / 255.0, 2)

if ATTR_HS_COLOR not in kwargs:
hue = self.functional_channel.hue
saturation = self.functional_channel.saturationLevel

if ATTR_BRIGHTNESS not in kwargs:
# If no brightness is set, use the current brightness
dim_level = self.functional_channel.dimLevel or 1.0

await self.functional_channel.set_hue_saturation_dim_level_async(
hue=hue, saturation_level=saturation, dim_level=dim_level
)

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the light off."""
await self.functional_channel.set_switch_state_async(on=False)


class HomematicipLightMeasuring(HomematicipLight):
"""Representation of the HomematicIP measuring light."""

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"requirements": ["homematicip==2.0.6"]
"requirements": ["homematicip==2.0.7"]
}
@ -18,6 +18,9 @@ from homematicip.device import (
|
||||
PrintedCircuitBoardSwitch2,
|
||||
PrintedCircuitBoardSwitchBattery,
|
||||
SwitchMeasuring,
|
||||
WiredInput32,
|
||||
WiredInputSwitch6,
|
||||
WiredSwitch4,
|
||||
WiredSwitch8,
|
||||
)
|
||||
from homematicip.group import ExtendedLinkedSwitchingGroup, SwitchingGroup
|
||||
@ -51,6 +54,7 @@ async def async_setup_entry(
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
WiredSwitch4,
|
||||
WiredSwitch8,
|
||||
OpenCollector8Module,
|
||||
BrandSwitch2,
|
||||
@ -60,6 +64,8 @@ async def async_setup_entry(
|
||||
MotionDetectorSwitchOutdoor,
|
||||
DinRailSwitch,
|
||||
DinRailSwitch4,
|
||||
WiredInput32,
|
||||
WiredInputSwitch6,
|
||||
),
|
||||
):
|
||||
channel_indices = [
|
||||
|
@ -29,7 +29,7 @@
|
||||
"services": {
|
||||
"dismiss": {
|
||||
"name": "Dismiss",
|
||||
"description": "Dismisses a html5 notification.",
|
||||
"description": "Dismisses an HTML5 notification.",
|
||||
"fields": {
|
||||
"target": {
|
||||
"name": "Target",
|
||||
|
@@ -64,7 +64,7 @@ def setup_bans(hass: HomeAssistant, app: Application, login_threshold: int) -> N
        """Initialize bans when app starts up."""
        await app[KEY_BAN_MANAGER].async_load()

    app.on_startup.append(ban_startup)  # type: ignore[arg-type]
    app.on_startup.append(ban_startup)


@middleware
@@ -11,9 +11,9 @@ from aiopvapi.shades import Shades
from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er

from .const import DOMAIN, HUB_EXCEPTIONS
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
@@ -64,6 +64,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) ->
        )
        return False

    # manual registration of the hub
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(dr.CONNECTION_NETWORK_MAC, hub.mac_address)},
        identifiers={(DOMAIN, hub.serial_number)},
        manufacturer=MANUFACTURER,
        name=hub.name,
        model=hub.model,
        sw_version=hub.firmware,
        hw_version=hub.main_processor_version.name,
    )

    try:
        rooms = Rooms(pv_request)
        room_data: PowerviewData = await rooms.get_rooms()
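Registering the hub up front lets the shade and sensor devices hang off it in the device registry. A minimal sketch of how a child device would typically reference such a hub through via_device, which is a standard DeviceInfo field; the identifiers below are illustrative, not the integration's actual values:

# Illustrative only: a child device linked to the manually registered hub.
from homeassistant.helpers.device_registry import DeviceInfo

def shade_device_info(domain: str, shade_id: str, hub_serial: str) -> DeviceInfo:
    """Build DeviceInfo for a shade that is reached through the hub."""
    return DeviceInfo(
        identifiers={(domain, shade_id)},
        via_device=(domain, hub_serial),  # points the shade at the hub device
    )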
@@ -60,15 +60,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
        self._devices_last_update: set[str] = set()
        self._zones_last_update: dict[str, set[str]] = {}
        self._areas_last_update: dict[str, set[int]] = {}

    def _async_add_remove_devices_and_entities(self, data: MowerDictionary) -> None:
        """Add/remove devices and dynamic entities, when amount of devices changed."""
        self._async_add_remove_devices(data)
        for mower_id in data:
            if data[mower_id].capabilities.stay_out_zones:
                self._async_add_remove_stay_out_zones(data)
            if data[mower_id].capabilities.work_areas:
                self._async_add_remove_work_areas(data)
        self.async_add_listener(self._on_data_update)

    async def _async_update_data(self) -> MowerDictionary:
        """Subscribe for websocket and poll data from the API."""
@@ -82,14 +74,38 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
            raise UpdateFailed(err) from err
        except AuthError as err:
            raise ConfigEntryAuthFailed(err) from err
        self._async_add_remove_devices_and_entities(data)
        return data

    @callback
    def _on_data_update(self) -> None:
        """Handle data updates and process dynamic entity management."""
        if self.data is not None:
            self._async_add_remove_devices()
            for mower_id in self.data:
                if self.data[mower_id].capabilities.stay_out_zones:
                    self._async_add_remove_stay_out_zones()
                if self.data[mower_id].capabilities.work_areas:
                    self._async_add_remove_work_areas()

    @callback
    def handle_websocket_updates(self, ws_data: MowerDictionary) -> None:
        """Process websocket callbacks and write them to the DataUpdateCoordinator."""
        self.hass.async_create_task(self._process_websocket_update(ws_data))

    async def _process_websocket_update(self, ws_data: MowerDictionary) -> None:
        """Handle incoming websocket update and update coordinator data."""
        for data in ws_data.values():
            existing_areas = data.work_areas or {}
            for task in data.calendar.tasks:
                work_area_id = task.work_area_id
                if work_area_id is not None and work_area_id not in existing_areas:
                    _LOGGER.debug(
                        "New work area %s detected, refreshing data", work_area_id
                    )
                    await self.async_request_refresh()
                    return

        self.async_set_updated_data(ws_data)
        self._async_add_remove_devices_and_entities(ws_data)

    @callback
    def async_set_updated_data(self, data: MowerDictionary) -> None:
@@ -138,9 +154,9 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
            "reconnect_task",
        )

    def _async_add_remove_devices(self, data: MowerDictionary) -> None:
    def _async_add_remove_devices(self) -> None:
        """Add new device, remove non-existing device."""
        current_devices = set(data)
        current_devices = set(self.data)

        # Skip update if no changes
        if current_devices == self._devices_last_update:
@@ -155,7 +171,6 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
        # Process new device
        new_devices = current_devices - self._devices_last_update
        if new_devices:
            self.data = data
            _LOGGER.debug("New devices found: %s", ", ".join(map(str, new_devices)))
            self._add_new_devices(new_devices)

@@ -179,11 +194,11 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
        for mower_callback in self.new_devices_callbacks:
            mower_callback(new_devices)

    def _async_add_remove_stay_out_zones(self, data: MowerDictionary) -> None:
    def _async_add_remove_stay_out_zones(self) -> None:
        """Add new stay-out zones, remove non-existing stay-out zones."""
        current_zones = {
            mower_id: set(mower_data.stay_out_zones.zones)
            for mower_id, mower_data in data.items()
            for mower_id, mower_data in self.data.items()
            if mower_data.capabilities.stay_out_zones
            and mower_data.stay_out_zones is not None
        }
@@ -225,11 +240,11 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):

        return current_zones

    def _async_add_remove_work_areas(self, data: MowerDictionary) -> None:
    def _async_add_remove_work_areas(self) -> None:
        """Add new work areas, remove non-existing work areas."""
        current_areas = {
            mower_id: set(mower_data.work_areas)
            for mower_id, mower_data in data.items()
            for mower_id, mower_data in self.data.items()
            if mower_data.capabilities.work_areas and mower_data.work_areas is not None
        }
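The coordinator now drives dynamic device and entity management from a listener on its own data instead of doing it inside _async_update_data, so the same code path covers both polled and pushed updates. A minimal sketch of that pattern in isolation, with hypothetical names (SketchCoordinator, _fetch), not the integration's actual classes:

# Minimal sketch of the listener-based pattern, assuming a hypothetical API.
import logging

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class SketchCoordinator(DataUpdateCoordinator[dict]):
    """Hypothetical coordinator that adds entities after every data update."""

    def __init__(self, hass: HomeAssistant) -> None:
        super().__init__(hass, _LOGGER, name="sketch")
        self._known_ids: set[str] = set()
        # Runs after self.data is replaced, whether by polling or by push.
        self.async_add_listener(self._on_data_update)

    async def _async_update_data(self) -> dict:
        return await self._fetch()  # hypothetical API call

    @callback
    def _on_data_update(self) -> None:
        if self.data is None:
            return
        new_ids = set(self.data) - self._known_ids
        self._known_ids |= new_ids
        # ...notify platform callbacks so they can create entities for new_ids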
@@ -112,16 +112,8 @@ class HuumDevice(ClimateEntity):
        await self._turn_on(temperature)

    async def async_update(self) -> None:
        """Get the latest status data.

        We get the latest status first from the status endpoints of the sauna.
        If that data does not include the temperature, that means that the sauna
        is off, we then call the off command which will in turn return the temperature.
        This is a workaround for getting the temperature as the Huum API does not
        return the target temperature of a sauna that is off, even if it can have
        a target temperature at that time.
        """
        self._status = await self._huum_handler.status_from_status_or_stop()
        """Get the latest status data."""
        self._status = await self._huum_handler.status()
        if self._target_temperature is None or self.hvac_mode == HVACMode.HEAT:
            self._target_temperature = self._status.target_temperature

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/huum",
  "iot_class": "cloud_polling",
  "requirements": ["huum==0.7.12"]
  "requirements": ["huum==0.8.0"]
}
@@ -2,6 +2,8 @@

from __future__ import annotations

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
@@ -9,14 +11,20 @@ from homeassistant.helpers.device import (
    async_entity_id_to_device_id,
    async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.helper_integration import async_handle_source_entity_changes
from homeassistant.helpers.helper_integration import (
    async_handle_source_entity_changes,
    async_remove_helper_config_entry_from_source_device,
)

from .const import CONF_SOURCE_SENSOR

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Integration from a config entry."""

    # This can be removed in HA Core 2026.2
    async_remove_stale_devices_links_keep_entity_device(
        hass,
        entry.entry_id,
@@ -29,20 +37,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
            options={**entry.options, CONF_SOURCE_SENSOR: source_entity_id},
        )

    async def source_entity_removed() -> None:
        # The source entity has been removed, we need to clean the device links.
        async_remove_stale_devices_links_keep_entity_device(hass, entry.entry_id, None)

    entry.async_on_unload(
        async_handle_source_entity_changes(
            hass,
            add_helper_config_entry_to_device=False,
            helper_config_entry_id=entry.entry_id,
            set_source_entity_id_or_uuid=set_source_entity_id_or_uuid,
            source_device_id=async_entity_id_to_device_id(
                hass, entry.options[CONF_SOURCE_SENSOR]
            ),
            source_entity_id_or_uuid=entry.options[CONF_SOURCE_SENSOR],
            source_entity_removed=source_entity_removed,
        )
    )
@@ -51,6 +55,40 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Migrate old entry."""
    _LOGGER.debug(
        "Migrating from version %s.%s", config_entry.version, config_entry.minor_version
    )

    if config_entry.version > 1:
        # This means the user has downgraded from a future version
        return False
    if config_entry.version == 1:
        options = {**config_entry.options}
        if config_entry.minor_version < 2:
            # Remove the integration config entry from the source device
            if source_device_id := async_entity_id_to_device_id(
                hass, options[CONF_SOURCE_SENSOR]
            ):
                async_remove_helper_config_entry_from_source_device(
                    hass,
                    helper_config_entry_id=config_entry.entry_id,
                    source_device_id=source_device_id,
                )
            hass.config_entries.async_update_entry(
                config_entry, options=options, minor_version=2
            )

    _LOGGER.debug(
        "Migration to version %s.%s successful",
        config_entry.version,
        config_entry.minor_version,
    )

    return True


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Update listener, called when the config entry options are changed."""
    # Remove device link for entry, the source device may have changed.

@@ -147,6 +147,8 @@ OPTIONS_FLOW = {
class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
    """Handle a config or options flow for Integration."""

    MINOR_VERSION = 2

    config_flow = CONFIG_FLOW
    options_flow = OPTIONS_FLOW
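Why the MINOR_VERSION = 2 bump matters: Home Assistant calls async_migrate_entry at setup when a stored entry's version or minor version is lower than the flow handler's VERSION/MINOR_VERSION. A condensed, hedged sketch of that gating, mirroring the one-step migration above:

# Sketch only: the version gate a migration function typically applies.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    if entry.version > 1:
        return False  # entry was created by a newer version; refuse to downgrade
    if entry.version == 1 and entry.minor_version < 2:
        # ...adjust stored data/options here, then persist the new minor version
        hass.config_entries.async_update_entry(entry, minor_version=2)
    return True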
@@ -40,8 +40,7 @@ from homeassistant.core import (
    callback,
)
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.device import async_entity_id_to_device
from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    AddEntitiesCallback,
@@ -246,11 +245,6 @@ async def async_setup_entry(
        registry, config_entry.options[CONF_SOURCE_SENSOR]
    )

    device_info = async_device_info_to_link_from_entity(
        hass,
        source_entity_id,
    )

    if (unit_prefix := config_entry.options.get(CONF_UNIT_PREFIX)) == "none":
        # Before we had support for optional selectors, "none" was used for selecting nothing
        unit_prefix = None
@@ -265,6 +259,7 @@ async def async_setup_entry(
        round_digits = int(round_digits)

    integral = IntegrationSensor(
        hass,
        integration_method=config_entry.options[CONF_METHOD],
        name=config_entry.title,
        round_digits=round_digits,
@@ -272,7 +267,6 @@ async def async_setup_entry(
        unique_id=config_entry.entry_id,
        unit_prefix=unit_prefix,
        unit_time=config_entry.options[CONF_UNIT_TIME],
        device_info=device_info,
        max_sub_interval=max_sub_interval,
    )

@@ -287,6 +281,7 @@ async def async_setup_platform(
) -> None:
    """Set up the integration sensor."""
    integral = IntegrationSensor(
        hass,
        integration_method=config[CONF_METHOD],
        name=config.get(CONF_NAME),
        round_digits=config.get(CONF_ROUND_DIGITS),
@@ -308,6 +303,7 @@ class IntegrationSensor(RestoreSensor):

    def __init__(
        self,
        hass: HomeAssistant,
        *,
        integration_method: str,
        name: str | None,
@@ -317,7 +313,6 @@ class IntegrationSensor(RestoreSensor):
        unit_prefix: str | None,
        unit_time: UnitOfTime,
        max_sub_interval: timedelta | None,
        device_info: DeviceInfo | None = None,
    ) -> None:
        """Initialize the integration sensor."""
        self._attr_unique_id = unique_id
@@ -335,7 +330,10 @@ class IntegrationSensor(RestoreSensor):
        self._attr_icon = "mdi:chart-histogram"
        self._source_entity: str = source_entity
        self._last_valid_state: Decimal | None = None
        self._attr_device_info = device_info
        self.device_entry = async_entity_id_to_device(
            hass,
            source_entity,
        )
        self._max_sub_interval: timedelta | None = (
            None  # disable time based integration
            if max_sub_interval is None or max_sub_interval.total_seconds() == 0
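The net effect of this change is that the integration sensor no longer builds a DeviceInfo that mirrors the source entity's device; it attaches itself to that device directly. A minimal usage sketch, with an illustrative entity id:

# Sketch: inside an entity's __init__, link the helper to its source's device.
from homeassistant.helpers.device import async_entity_id_to_device

def link_to_source(hass, entity, source_entity_id: str) -> None:
    """Attach the helper entity to the same device as its source entity."""
    entity.device_entry = async_entity_id_to_device(hass, source_entity_id)

# e.g. self.device_entry = async_entity_id_to_device(hass, "sensor.power_meter")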
@@ -2,6 +2,7 @@

from __future__ import annotations

import logging
from typing import Any

from homeassistant.components.media_player import (
@@ -21,6 +22,8 @@ from .const import CONTENT_TYPE_MAP, LOGGER, MAX_IMAGE_WIDTH
from .coordinator import JellyfinConfigEntry, JellyfinDataUpdateCoordinator
from .entity import JellyfinClientEntity

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,
@@ -177,10 +180,15 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
    def supported_features(self) -> MediaPlayerEntityFeature:
        """Flag media player features that are supported."""
        commands: list[str] = self.capabilities.get("SupportedCommands", [])
        controllable = self.capabilities.get("SupportsMediaControl", False)
        _LOGGER.debug(
            "Supported commands for device %s, client %s, %s",
            self.device_name,
            self.client_name,
            commands,
        )
        features = MediaPlayerEntityFeature(0)

        if controllable:
            if "PlayMediaSource" in commands:
                features |= (
                    MediaPlayerEntityFeature.BROWSE_MEDIA
                    | MediaPlayerEntityFeature.PLAY_MEDIA
@ -13,8 +13,7 @@ from homeassistant.components.binary_sensor import (
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers import event
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
@ -23,36 +22,29 @@ from .entity import JewishCalendarConfigEntry, JewishCalendarEntity
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class JewishCalendarBinarySensorMixIns(BinarySensorEntityDescription):
|
||||
"""Binary Sensor description mixin class for Jewish Calendar."""
|
||||
|
||||
is_on: Callable[[Zmanim, dt.datetime], bool] = lambda _, __: False
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class JewishCalendarBinarySensorEntityDescription(
|
||||
JewishCalendarBinarySensorMixIns, BinarySensorEntityDescription
|
||||
):
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class JewishCalendarBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Binary Sensor Entity description for Jewish Calendar."""
|
||||
|
||||
is_on: Callable[[Zmanim], Callable[[dt.datetime], bool]]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] = (
|
||||
JewishCalendarBinarySensorEntityDescription(
|
||||
key="issur_melacha_in_effect",
|
||||
translation_key="issur_melacha_in_effect",
|
||||
is_on=lambda state, now: bool(state.issur_melacha_in_effect(now)),
|
||||
is_on=lambda state: state.issur_melacha_in_effect,
|
||||
),
|
||||
JewishCalendarBinarySensorEntityDescription(
|
||||
key="erev_shabbat_hag",
|
||||
translation_key="erev_shabbat_hag",
|
||||
is_on=lambda state, now: bool(state.erev_shabbat_chag(now)),
|
||||
is_on=lambda state: state.erev_shabbat_chag,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JewishCalendarBinarySensorEntityDescription(
|
||||
key="motzei_shabbat_hag",
|
||||
translation_key="motzei_shabbat_hag",
|
||||
is_on=lambda state, now: bool(state.motzei_shabbat_chag(now)),
|
||||
is_on=lambda state: state.motzei_shabbat_chag,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
@ -73,9 +65,7 @@ async def async_setup_entry(
|
||||
class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity):
|
||||
"""Representation of an Jewish Calendar binary sensor."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
_update_unsub: CALLBACK_TYPE | None = None
|
||||
|
||||
entity_description: JewishCalendarBinarySensorEntityDescription
|
||||
|
||||
@ -83,40 +73,12 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity):
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if sensor is on."""
|
||||
zmanim = self.make_zmanim(dt.date.today())
|
||||
return self.entity_description.is_on(zmanim, dt_util.now())
|
||||
return self.entity_description.is_on(zmanim)(dt_util.now())
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._schedule_update()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = None
|
||||
return await super().async_will_remove_from_hass()
|
||||
|
||||
@callback
|
||||
def _update(self, now: dt.datetime | None = None) -> None:
|
||||
"""Update the state of the sensor."""
|
||||
self._update_unsub = None
|
||||
self._schedule_update()
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _schedule_update(self) -> None:
|
||||
"""Schedule the next update of the sensor."""
|
||||
now = dt_util.now()
|
||||
zmanim = self.make_zmanim(dt.date.today())
|
||||
update = zmanim.netz_hachama.local + dt.timedelta(days=1)
|
||||
candle_lighting = zmanim.candle_lighting
|
||||
if candle_lighting is not None and now < candle_lighting < update:
|
||||
update = candle_lighting
|
||||
havdalah = zmanim.havdalah
|
||||
if havdalah is not None and now < havdalah < update:
|
||||
update = havdalah
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = event.async_track_point_in_time(
|
||||
self.hass, self._update, update
|
||||
)
|
||||
def _update_times(self, zmanim: Zmanim) -> list[dt.datetime | None]:
|
||||
"""Return a list of times to update the sensor."""
|
||||
return [
|
||||
zmanim.netz_hachama.local + dt.timedelta(days=1),
|
||||
zmanim.candle_lighting,
|
||||
zmanim.havdalah,
|
||||
]
|
||||
|
@ -1,17 +1,24 @@
|
||||
"""Entity representing a Jewish Calendar sensor."""
|
||||
|
||||
from abc import abstractmethod
|
||||
from dataclasses import dataclass
|
||||
import datetime as dt
|
||||
import logging
|
||||
|
||||
from hdate import HDateInfo, Location, Zmanim
|
||||
from hdate.translator import Language, set_language
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import CALLBACK_TYPE, callback
|
||||
from homeassistant.helpers import event
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData]
|
||||
|
||||
|
||||
@ -39,6 +46,8 @@ class JewishCalendarEntity(Entity):
|
||||
"""An HA implementation for Jewish Calendar entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
_update_unsub: CALLBACK_TYPE | None = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -63,3 +72,55 @@ class JewishCalendarEntity(Entity):
|
||||
candle_lighting_offset=self.data.candle_lighting_offset,
|
||||
havdalah_offset=self.data.havdalah_offset,
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Call when entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._schedule_update()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = None
|
||||
return await super().async_will_remove_from_hass()
|
||||
|
||||
@abstractmethod
|
||||
def _update_times(self, zmanim: Zmanim) -> list[dt.datetime | None]:
|
||||
"""Return a list of times to update the sensor."""
|
||||
|
||||
def _schedule_update(self) -> None:
|
||||
"""Schedule the next update of the sensor."""
|
||||
now = dt_util.now()
|
||||
zmanim = self.make_zmanim(now.date())
|
||||
update = dt_util.start_of_local_day() + dt.timedelta(days=1)
|
||||
|
||||
for update_time in self._update_times(zmanim):
|
||||
if update_time is not None and now < update_time < update:
|
||||
update = update_time
|
||||
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = event.async_track_point_in_time(
|
||||
self.hass, self._update, update
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update(self, now: dt.datetime | None = None) -> None:
|
||||
"""Update the sensor data."""
|
||||
self._update_unsub = None
|
||||
self._schedule_update()
|
||||
self.create_results(now)
|
||||
self.async_write_ha_state()
|
||||
|
||||
def create_results(self, now: dt.datetime | None = None) -> None:
|
||||
"""Create the results for the sensor."""
|
||||
if now is None:
|
||||
now = dt_util.now()
|
||||
|
||||
_LOGGER.debug("Now: %s Location: %r", now, self.data.location)
|
||||
|
||||
today = now.date()
|
||||
zmanim = self.make_zmanim(today)
|
||||
dateinfo = HDateInfo(today, diaspora=self.data.diaspora)
|
||||
self.data.results = JewishCalendarDataResults(dateinfo, zmanim)
|
||||
|
@ -17,16 +17,11 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers import event
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .entity import (
|
||||
JewishCalendarConfigEntry,
|
||||
JewishCalendarDataResults,
|
||||
JewishCalendarEntity,
|
||||
)
|
||||
from .entity import JewishCalendarConfigEntry, JewishCalendarEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -217,7 +212,7 @@ async def async_setup_entry(
|
||||
config_entry: JewishCalendarConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Jewish calendar sensors ."""
|
||||
"""Set up the Jewish calendar sensors."""
|
||||
sensors: list[JewishCalendarBaseSensor] = [
|
||||
JewishCalendarSensor(config_entry, description) for description in INFO_SENSORS
|
||||
]
|
||||
@ -231,59 +226,15 @@ async def async_setup_entry(
|
||||
class JewishCalendarBaseSensor(JewishCalendarEntity, SensorEntity):
|
||||
"""Base class for Jewish calendar sensors."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
_update_unsub: CALLBACK_TYPE | None = None
|
||||
|
||||
entity_description: JewishCalendarBaseSensorDescription
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Call when entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._schedule_update()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = None
|
||||
return await super().async_will_remove_from_hass()
|
||||
|
||||
def _schedule_update(self) -> None:
|
||||
"""Schedule the next update of the sensor."""
|
||||
now = dt_util.now()
|
||||
zmanim = self.make_zmanim(now.date())
|
||||
update = None
|
||||
if self.entity_description.next_update_fn:
|
||||
update = self.entity_description.next_update_fn(zmanim)
|
||||
next_midnight = dt_util.start_of_local_day() + dt.timedelta(days=1)
|
||||
if update is None or now > update:
|
||||
update = next_midnight
|
||||
if self._update_unsub:
|
||||
self._update_unsub()
|
||||
self._update_unsub = event.async_track_point_in_time(
|
||||
self.hass, self._update_data, update
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_data(self, now: dt.datetime | None = None) -> None:
|
||||
"""Update the sensor data."""
|
||||
self._update_unsub = None
|
||||
self._schedule_update()
|
||||
self.create_results(now)
|
||||
self.async_write_ha_state()
|
||||
|
||||
def create_results(self, now: dt.datetime | None = None) -> None:
|
||||
"""Create the results for the sensor."""
|
||||
if now is None:
|
||||
now = dt_util.now()
|
||||
|
||||
_LOGGER.debug("Now: %s Location: %r", now, self.data.location)
|
||||
|
||||
today = now.date()
|
||||
zmanim = self.make_zmanim(today)
|
||||
dateinfo = HDateInfo(today, diaspora=self.data.diaspora)
|
||||
self.data.results = JewishCalendarDataResults(dateinfo, zmanim)
|
||||
def _update_times(self, zmanim: Zmanim) -> list[dt.datetime | None]:
|
||||
"""Return a list of times to update the sensor."""
|
||||
if self.entity_description.next_update_fn is None:
|
||||
return []
|
||||
return [self.entity_description.next_update_fn(zmanim)]
|
||||
|
||||
def get_dateinfo(self, now: dt.datetime | None = None) -> HDateInfo:
|
||||
"""Get the next date info."""
|
||||
|
@@ -50,7 +50,6 @@ def async_setup_services(hass: HomeAssistant) -> None:
        today = now.date()
        event_date = get_astral_event_date(hass, SUN_EVENT_SUNSET, today)
        if event_date is None:
            _LOGGER.error("Can't get sunset event date for %s", today)
            raise HomeAssistantError(
                translation_domain=DOMAIN, translation_key="sunset_event"
            )
@@ -13,8 +13,8 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/keymitt_ble",
  "integration_type": "hub",
  "integration_type": "device",
  "iot_class": "assumed_state",
  "loggers": ["keymitt_ble"],
  "requirements": ["PyMicroBot==0.0.17"]
  "loggers": ["keymitt_ble", "microbot"],
  "requirements": ["PyMicroBot==0.0.23"]
}
@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any, Final, Literal
|
||||
|
||||
@ -20,8 +19,8 @@ from xknx.io.util import validate_ip as xknx_validate_ip
|
||||
from xknx.secure.keyring import Keyring, XMLInterface
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigEntry,
|
||||
ConfigEntryBaseFlow,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
@ -103,12 +102,14 @@ _PORT_SELECTOR = vol.All(
|
||||
)
|
||||
|
||||
|
||||
class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
|
||||
"""Base class for KNX flows."""
|
||||
class KNXConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a KNX config flow."""
|
||||
|
||||
def __init__(self, initial_data: KNXConfigEntryData) -> None:
|
||||
"""Initialize KNXCommonFlow."""
|
||||
self.initial_data = initial_data
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize KNX config flow."""
|
||||
self.initial_data = DEFAULT_ENTRY_DATA
|
||||
self.new_entry_data = KNXConfigEntryData()
|
||||
self.new_title: str | None = None
|
||||
|
||||
@ -121,19 +122,21 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
|
||||
self._gatewayscanner: GatewayScanner | None = None
|
||||
self._async_scan_gen: AsyncGenerator[GatewayDescriptor] | None = None
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> KNXOptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return KNXOptionsFlow(config_entry)
|
||||
|
||||
@property
|
||||
def _xknx(self) -> XKNX:
|
||||
"""Return XKNX instance."""
|
||||
if isinstance(self, OptionsFlow) and (
|
||||
if (self.source == SOURCE_RECONFIGURE) and (
|
||||
knx_module := self.hass.data.get(KNX_MODULE_KEY)
|
||||
):
|
||||
return knx_module.xknx
|
||||
return XKNX()
|
||||
|
||||
@abstractmethod
|
||||
def finish_flow(self) -> ConfigFlowResult:
|
||||
"""Finish the flow."""
|
||||
|
||||
@property
|
||||
def connection_type(self) -> str:
|
||||
"""Return the configured connection type."""
|
||||
@ -150,6 +153,61 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
|
||||
self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA),
|
||||
)
|
||||
|
||||
@callback
|
||||
def finish_flow(self) -> ConfigFlowResult:
|
||||
"""Create or update the ConfigEntry."""
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
entry = self._get_reconfigure_entry()
|
||||
_tunnel_endpoint_str = self.initial_data.get(
|
||||
CONF_KNX_TUNNEL_ENDPOINT_IA, "Tunneling"
|
||||
)
|
||||
if self.new_title and not entry.title.startswith(
|
||||
# Overwrite standard titles, but not user defined ones
|
||||
(
|
||||
f"KNX {self.initial_data[CONF_KNX_CONNECTION_TYPE]}",
|
||||
CONF_KNX_AUTOMATIC.capitalize(),
|
||||
"Tunneling @ ",
|
||||
f"{_tunnel_endpoint_str} @",
|
||||
"Tunneling UDP @ ",
|
||||
"Tunneling TCP @ ",
|
||||
"Secure Tunneling",
|
||||
"Routing as ",
|
||||
"Secure Routing as ",
|
||||
)
|
||||
):
|
||||
self.new_title = None
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data_updates=self.new_entry_data,
|
||||
title=self.new_title or UNDEFINED,
|
||||
)
|
||||
|
||||
title = self.new_title or f"KNX {self.new_entry_data[CONF_KNX_CONNECTION_TYPE]}"
|
||||
return self.async_create_entry(
|
||||
title=title,
|
||||
data=DEFAULT_ENTRY_DATA | self.new_entry_data,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
return await self.async_step_connection_type()
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of existing entry."""
|
||||
entry = self._get_reconfigure_entry()
|
||||
self.initial_data = dict(entry.data) # type: ignore[assignment]
|
||||
return self.async_show_menu(
|
||||
step_id="reconfigure",
|
||||
menu_options=[
|
||||
"connection_type",
|
||||
"secure_knxkeys",
|
||||
],
|
||||
)
|
||||
|
||||
async def async_step_connection_type(
|
||||
self, user_input: dict | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@ -441,7 +499,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
|
||||
)
|
||||
ip_address: str | None
|
||||
if ( # initial attempt on ConfigFlow or coming from automatic / routing
|
||||
(isinstance(self, ConfigFlow) or not _reconfiguring_existing_tunnel)
|
||||
not _reconfiguring_existing_tunnel
|
||||
and not user_input
|
||||
and self._selected_tunnel is not None
|
||||
): # default to first found tunnel
|
||||
@ -841,52 +899,20 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
|
||||
)
|
||||
|
||||
|
||||
class KNXConfigFlow(KNXCommonFlow, ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a KNX config flow."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize KNX options flow."""
|
||||
super().__init__(initial_data=DEFAULT_ENTRY_DATA)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> KNXOptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return KNXOptionsFlow(config_entry)
|
||||
|
||||
@callback
|
||||
def finish_flow(self) -> ConfigFlowResult:
|
||||
"""Create the ConfigEntry."""
|
||||
title = self.new_title or f"KNX {self.new_entry_data[CONF_KNX_CONNECTION_TYPE]}"
|
||||
return self.async_create_entry(
|
||||
title=title,
|
||||
data=DEFAULT_ENTRY_DATA | self.new_entry_data,
|
||||
)
|
||||
|
||||
async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
return await self.async_step_connection_type()
|
||||
|
||||
|
||||
class KNXOptionsFlow(KNXCommonFlow, OptionsFlow):
|
||||
class KNXOptionsFlow(OptionsFlow):
|
||||
"""Handle KNX options."""
|
||||
|
||||
general_settings: dict
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize KNX options flow."""
|
||||
super().__init__(initial_data=config_entry.data) # type: ignore[arg-type]
|
||||
self.initial_data = dict(config_entry.data)
|
||||
|
||||
@callback
|
||||
def finish_flow(self) -> ConfigFlowResult:
|
||||
def finish_flow(self, new_entry_data: KNXConfigEntryData) -> ConfigFlowResult:
|
||||
"""Update the ConfigEntry and finish the flow."""
|
||||
new_data = DEFAULT_ENTRY_DATA | self.initial_data | self.new_entry_data
|
||||
new_data = self.initial_data | new_entry_data
|
||||
self.hass.config_entries.async_update_entry(
|
||||
self.config_entry,
|
||||
data=new_data,
|
||||
title=self.new_title or UNDEFINED,
|
||||
)
|
||||
return self.async_create_entry(title="", data={})
|
||||
|
||||
@ -894,26 +920,20 @@ class KNXOptionsFlow(KNXCommonFlow, OptionsFlow):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage KNX options."""
|
||||
return self.async_show_menu(
|
||||
step_id="init",
|
||||
menu_options=[
|
||||
"connection_type",
|
||||
"communication_settings",
|
||||
"secure_knxkeys",
|
||||
],
|
||||
)
|
||||
return await self.async_step_communication_settings()
|
||||
|
||||
async def async_step_communication_settings(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage KNX communication settings."""
|
||||
if user_input is not None:
|
||||
self.new_entry_data = KNXConfigEntryData(
|
||||
state_updater=user_input[CONF_KNX_STATE_UPDATER],
|
||||
rate_limit=user_input[CONF_KNX_RATE_LIMIT],
|
||||
telegram_log_size=user_input[CONF_KNX_TELEGRAM_LOG_SIZE],
|
||||
return self.finish_flow(
|
||||
KNXConfigEntryData(
|
||||
state_updater=user_input[CONF_KNX_STATE_UPDATER],
|
||||
rate_limit=user_input[CONF_KNX_RATE_LIMIT],
|
||||
telegram_log_size=user_input[CONF_KNX_TELEGRAM_LOG_SIZE],
|
||||
)
|
||||
)
|
||||
return self.finish_flow()
|
||||
|
||||
data_schema = {
|
||||
vol.Required(
|
||||
|
@@ -104,7 +104,7 @@ rules:
    Since all entities are configured manually, names are user-defined.
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: todo
  reconfiguration-flow: done
  repair-issues: todo
  stale-devices:
    status: exempt
@ -1,6 +1,13 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"reconfigure": {
|
||||
"title": "KNX connection settings",
|
||||
"menu_options": {
|
||||
"connection_type": "Reconfigure KNX connection",
|
||||
"secure_knxkeys": "Import KNX keyring file"
|
||||
}
|
||||
},
|
||||
"connection_type": {
|
||||
"title": "KNX connection",
|
||||
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.)\n\n'Tunneling' will connect to a specific KNX IP interface over a tunnel.\n\n'Routing' will use Multicast to communicate with KNX IP routers.",
|
||||
@ -65,7 +72,7 @@
|
||||
},
|
||||
"secure_knxkeys": {
|
||||
"title": "Import KNX Keyring",
|
||||
"description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
|
||||
"description": "The keyring is used to encrypt and decrypt KNX IP Secure communication. You can import a new keyring file or re-import to update existing keys if your configuration has changed.",
|
||||
"data": {
|
||||
"knxkeys_file": "Keyring file",
|
||||
"knxkeys_password": "Keyring password"
|
||||
@ -129,6 +136,9 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_backbone_key": "Invalid backbone key. 32 hexadecimal digits expected.",
|
||||
@ -159,16 +169,8 @@
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "KNX Settings",
|
||||
"menu_options": {
|
||||
"connection_type": "Configure KNX interface",
|
||||
"communication_settings": "Communication settings",
|
||||
"secure_knxkeys": "Import a `.knxkeys` file"
|
||||
}
|
||||
},
|
||||
"communication_settings": {
|
||||
"title": "[%key:component::knx::options::step::init::menu_options::communication_settings%]",
|
||||
"title": "Communication settings",
|
||||
"data": {
|
||||
"state_updater": "State updater",
|
||||
"rate_limit": "Rate limit",
|
||||
@ -179,147 +181,7 @@
|
||||
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
|
||||
"telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
|
||||
}
|
||||
},
|
||||
"connection_type": {
|
||||
"title": "[%key:component::knx::config::step::connection_type::title%]",
|
||||
"description": "[%key:component::knx::config::step::connection_type::description%]",
|
||||
"data": {
|
||||
"connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
|
||||
},
|
||||
"data_description": {
|
||||
"connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
|
||||
}
|
||||
},
|
||||
"tunnel": {
|
||||
"title": "[%key:component::knx::config::step::tunnel::title%]",
|
||||
"data": {
|
||||
"gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
|
||||
},
|
||||
"data_description": {
|
||||
"gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
|
||||
}
|
||||
},
|
||||
"tcp_tunnel_endpoint": {
|
||||
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
|
||||
"data": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
|
||||
}
|
||||
},
|
||||
"manual_tunnel": {
|
||||
"title": "[%key:component::knx::config::step::manual_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::manual_tunnel::description%]",
|
||||
"data": {
|
||||
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data::tunneling_type%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"route_back": "[%key:component::knx::config::step::manual_tunnel::data::route_back%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
|
||||
"port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
|
||||
"host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
|
||||
"route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_tunnel": {
|
||||
"title": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::description%]",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_knxkeys%]",
|
||||
"secure_tunnel_manual": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_tunnel_manual%]"
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_routing": {
|
||||
"title": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::description%]",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_knxkeys%]",
|
||||
"secure_routing_manual": "[%key:component::knx::config::step::secure_key_source_menu_routing::menu_options::secure_routing_manual%]"
|
||||
}
|
||||
},
|
||||
"secure_knxkeys": {
|
||||
"title": "[%key:component::knx::config::step::secure_knxkeys::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_knxkeys::description%]",
|
||||
"data": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
|
||||
}
|
||||
},
|
||||
"knxkeys_tunnel_select": {
|
||||
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
|
||||
"data": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
|
||||
}
|
||||
},
|
||||
"secure_tunnel_manual": {
|
||||
"title": "[%key:component::knx::config::step::secure_tunnel_manual::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_tunnel_manual::description%]",
|
||||
"data": {
|
||||
"user_id": "[%key:component::knx::config::step::secure_tunnel_manual::data::user_id%]",
|
||||
"user_password": "[%key:component::knx::config::step::secure_tunnel_manual::data::user_password%]",
|
||||
"device_authentication": "[%key:component::knx::config::step::secure_tunnel_manual::data::device_authentication%]"
|
||||
},
|
||||
"data_description": {
|
||||
"user_id": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::user_id%]",
|
||||
"user_password": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::user_password%]",
|
||||
"device_authentication": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::device_authentication%]"
|
||||
}
|
||||
},
|
||||
"secure_routing_manual": {
|
||||
"title": "[%key:component::knx::config::step::secure_routing_manual::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_tunnel_manual::description%]",
|
||||
"data": {
|
||||
"backbone_key": "[%key:component::knx::config::step::secure_routing_manual::data::backbone_key%]",
|
||||
"sync_latency_tolerance": "[%key:component::knx::config::step::secure_routing_manual::data::sync_latency_tolerance%]"
|
||||
},
|
||||
"data_description": {
|
||||
"backbone_key": "[%key:component::knx::config::step::secure_routing_manual::data_description::backbone_key%]",
|
||||
"sync_latency_tolerance": "[%key:component::knx::config::step::secure_routing_manual::data_description::sync_latency_tolerance%]"
|
||||
}
|
||||
},
|
||||
"routing": {
|
||||
"title": "[%key:component::knx::config::step::routing::title%]",
|
||||
"description": "[%key:component::knx::config::step::routing::description%]",
|
||||
"data": {
|
||||
"individual_address": "[%key:component::knx::config::step::routing::data::individual_address%]",
|
||||
"routing_secure": "[%key:component::knx::config::step::routing::data::routing_secure%]",
|
||||
"multicast_group": "[%key:component::knx::config::step::routing::data::multicast_group%]",
|
||||
"multicast_port": "[%key:component::knx::config::step::routing::data::multicast_port%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
|
||||
},
|
||||
"data_description": {
|
||||
"individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
|
||||
"routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
|
||||
"multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
|
||||
"multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_backbone_key": "[%key:component::knx::config::error::invalid_backbone_key%]",
|
||||
"invalid_individual_address": "[%key:component::knx::config::error::invalid_individual_address%]",
|
||||
"invalid_ip_address": "[%key:component::knx::config::error::invalid_ip_address%]",
|
||||
"keyfile_no_backbone_key": "[%key:component::knx::config::error::keyfile_no_backbone_key%]",
|
||||
"keyfile_invalid_signature": "[%key:component::knx::config::error::keyfile_invalid_signature%]",
|
||||
"keyfile_no_tunnel_for_host": "[%key:component::knx::config::error::keyfile_no_tunnel_for_host%]",
|
||||
"keyfile_not_found": "[%key:component::knx::config::error::keyfile_not_found%]",
|
||||
"no_router_discovered": "[%key:component::knx::config::error::no_router_discovered%]",
|
||||
"no_tunnel_discovered": "[%key:component::knx::config::error::no_tunnel_discovered%]",
|
||||
"unsupported_tunnel_type": "[%key:component::knx::config::error::unsupported_tunnel_type%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
@@ -2,9 +2,9 @@

from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable
from functools import wraps
import inspect
from typing import TYPE_CHECKING, Any, Final, overload

import knx_frontend as knx_panel
@@ -116,7 +116,7 @@ def provide_knx(
            "KNX integration not loaded.",
        )

    if asyncio.iscoroutinefunction(func):
    if inspect.iscoroutinefunction(func):

        @wraps(func)
        async def with_knx(
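The switch from asyncio.iscoroutinefunction to inspect.iscoroutinefunction follows the deprecation of the asyncio variant in recent Python releases; both report True for async def callables. A tiny standalone check illustrating the call the decorator above relies on:

# Standalone illustration of the coroutine-function check.
import inspect


async def async_handler() -> None: ...
def sync_handler() -> None: ...

assert inspect.iscoroutinefunction(async_handler)
assert not inspect.iscoroutinefunction(sync_handler)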