Compare commits


55 Commits

Author SHA1 Message Date
Paulus Schoutsen
17a5815ca1 Add AI Task prefs 2025-06-18 00:00:27 +00:00
Paulus Schoutsen
a8d4caab01 Remove GenTextTaskType 2025-06-17 12:54:49 -04:00
Paulus Schoutsen
2be6acec03 Add AI Task integration 2025-06-17 12:33:58 -04:00
Paulus Schoutsen
fa21269f0d Simplify ChatLog dependencies (#146351) 2025-06-15 17:41:15 -04:00
starkillerOG
5f5869ffc6 Bump reolink-aio to 0.14.1 (#146903) 2025-06-15 20:53:32 +02:00
Nathan Spencer
7a2d99a450 Bump pylitterbot to 2024.2.0 (#146901) 2025-06-15 20:41:07 +02:00
Andre Lengwenus
6b669ce40c Bump pypck to 0.8.8 (#146841) 2025-06-15 19:32:13 +02:00
Markus Adrario
fdf4ed2aa5 Homee add button_state to event entities (#146860)
* use entityDescription

* Add new event and adapt tests

* change translation

* use references in strings
2025-06-15 18:17:52 +02:00
Simone Chemelli
1361d10cd7 Bump aioamazondevices to 3.1.4 (#146883) 2025-06-15 08:30:19 -07:00
Marc Mueller
8c7ba11493 Fix telegram_bot RuntimeWarning in tests (#146781) 2025-06-15 11:23:17 +03:00
Marc Mueller
29ce17abf4 Update eq3btsmart to 2.1.0 (#146335)
* Update eq3btsmart to 2.1.0

* Update import names

* Update register callbacks

* Updated data model

* Update Thermostat set value methods

* Update Thermostat init

* Thermostat status and device_data are always given

* Minor compatibility fixes

---------

Co-authored-by: Lennard Beers <l.beers@outlook.de>
2025-06-15 10:17:01 +02:00
Markus Lanthaler
c988d1ce36 Add support for Gemini's new TTS capabilities (#145872)
* Add support for Gemini TTS

* Add tests

* Use wave library and update a few comments
2025-06-14 22:21:04 -07:00
Paulus Schoutsen
ec02f6d010 Extract Google LLM base entity class (#146817) 2025-06-14 22:17:52 -07:00
Simone Chemelli
9f19c4250a Bump aioamazondevices to 3.1.3 (#146828) 2025-06-15 01:45:28 +03:00
Marc Mueller
d7b583ae51 Update pydantic to 2.11.7 (#146835) 2025-06-14 23:31:09 +02:00
Maciej Bieniek
152e5254e2 Use Shelly main device area as suggested area for sub-devices (#146810) 2025-06-14 13:53:51 -04:00
starkillerOG
3f8f7cd578 Bump motion blinds to 0.6.28 (#146831) 2025-06-14 19:01:41 +02:00
Chris Talkington
ed3fb62ffc Update rokuecp to 0.19.5 (#146788) 2025-06-14 18:49:16 +02:00
J. Nick Koston
1d14e1f018 Bump aiohttp to 3.12.13 (#146830)
changelog: https://github.com/aio-libs/aiohttp/compare/v3.12.12...v3.12.13

Likely does not affect us at all but just in case, tagging
2025-06-14 17:13:20 +01:00
hahn-th
2ac8901a0d Improve code quality in async_setup_entry of switches in homematicip_cloud (#146816)
improve setup of switches
2025-06-14 17:26:08 +02:00
Joris Pelgröm
6204fd5363 Add polling to LetPot coordinator (#146823)
- Adds polling (update_interval) to the coordinator for the LetPot integration. Push remains the primary update mechanism for all entities, but:
   - Polling makes entities go unavailable when the device can't be reached, which otherwise would not happen.
   - Pump changes do not always trigger a status push from the device (reason unclear); polling lets the integration catch up to the real state (see the coordinator sketch after this commit list).
2025-06-14 16:24:48 +02:00
Brett Adams
ce52ef64db Bump tesla-fleet-api to 1.1.3 (#146793) 2025-06-14 08:39:27 -05:00
Paulus Schoutsen
059c12798d Drop user prompt from LLMContext (#146787) 2025-06-13 22:01:39 -04:00
epenet
56aa809074 Simplify google_photos service actions (#146744) 2025-06-13 18:57:11 -07:00
Marc Mueller
3d2dca5f0c Adjust scripts for compatibility with Python 3.14 (#146774) 2025-06-13 21:54:25 -04:00
starkillerOG
cdb2b407be Add Reolink baby cry sensitivity (#146773)
* Add baby cry sensitivity

* Adjust tests
2025-06-14 00:11:13 +01:00
Ian
186ed451a9 Bump nextbus client to 2.3.0 (#146780) 2025-06-14 00:09:29 +01:00
hahn-th
761a0877e6 Fix throttling issue in HomematicIP Cloud (#146683)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-06-13 19:57:03 +02:00
J. Nick Koston
91bc56b15c Bump aiodns to 3.5.0 (#146758) 2025-06-13 19:12:52 +02:00
Paulus Schoutsen
d1e2c62433 Remove unnecessary string formatting. (#146762) 2025-06-13 10:10:47 -07:00
Duco Sebel
524c16fbe1 Bump python-homewizard-energy to 9.1.1 (#146723)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-06-13 18:59:28 +02:00
Marc Mueller
2fdd3d66bc Update pydantic to 2.11.6 (#146745) 2025-06-13 18:53:05 +02:00
Simone Chemelli
6a1e3b60ee Filter speak notify entity for WHA devices in Alexa Devices (#146688) 2025-06-13 18:49:18 +02:00
DeerMaximum
434cd95a66 Use ConfigEntry.runtime_data to store runtime data in NINA (#146754)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-06-13 18:47:21 +02:00
Vasilis Valatsos
1a5bc2c7e0 Drop HostKeyAlgorithms in aruba (#146619) 2025-06-13 18:47:07 +02:00
epenet
a66e9a1a2c Simplify reolink service actions (#146751) 2025-06-13 18:08:59 +02:00
Paulus Schoutsen
d880ce6bb4 Clean up Google conversation entity (#146736) 2025-06-13 10:30:14 -04:00
Paulus Schoutsen
c96023dcae Clean up Anthropic conversation entity (#146737) 2025-06-13 10:29:26 -04:00
Paulus Schoutsen
2f8ad4d5bf Clean up Ollama conversation entity (#146738) 2025-06-13 10:29:19 -04:00
Marc Mueller
038a848d53 Fix androidtv isfile patcher in tests (#146696) 2025-06-13 16:25:09 +02:00
epenet
ff17d79e73 Bump wakeonlan to 3.1.0 (#146655)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-06-13 08:58:44 -05:00
tronikos
a8201009f3 Fix opower to work with aiohttp>=3.12.7 by disabling cookie quoting (#146697)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-06-13 08:58:27 -05:00
Simone Chemelli
a349653282 Bump aioamazondevices to 3.1.2 (#146690) 2025-06-13 16:53:18 +03:00
epenet
355ee1178e Add callback decorator to async_setup_services (#146729) 2025-06-13 15:16:55 +02:00
Marc Mueller
30c5df3eaa Adjust core create_task tests with event_loop patch (#146699) 2025-06-13 15:16:28 +02:00
Marc Mueller
10874af19a Ignore lingering pycares shutdown thread (#146733) 2025-06-13 15:09:37 +02:00
Marc Mueller
704118b3d0 Remove unnecessary patch from toon tests (#146691) 2025-06-13 12:53:33 +02:00
Marc Mueller
7c575d0316 Fix asuswrt test patch (#146692) 2025-06-13 12:52:56 +02:00
starkillerOG
ab3f11bfe7 Add Reolink IR brightness entity (#146717) 2025-06-13 12:50:12 +02:00
Allen Porter
f0357539ad Add myself as a remote calendar code owner (#146703) 2025-06-13 12:48:24 +02:00
Allen Porter
e70a2dd257 Partial revert of update to remote calendar to fix issue where calendar does not update (#146702)
Partial revert
2025-06-13 12:47:56 +02:00
Allen Porter
5ef99a15a5 Revert scan interval change in local calendar (#146700) 2025-06-13 12:46:01 +02:00
Marc Mueller
6421973cd6 Remove unnecessary patch from panel_custom tests (#146695) 2025-06-13 10:46:26 +02:00
Marc Mueller
7201171eb5 Replace unnecessary pydantic import in matrix tests (#146693) 2025-06-13 10:45:54 +02:00
Abílio Costa
1fb438fa6c Add missing mock value to Reolink test (#146689) 2025-06-13 07:43:21 +02:00
144 changed files with 3167 additions and 1214 deletions
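
The LetPot commit above adds an update_interval to a coordinator that is otherwise push-driven. A minimal sketch of that pattern, assuming a hypothetical client object and a made-up 10-minute interval (neither is taken from the actual change):

from datetime import timedelta
import logging
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class PushFirstCoordinator(DataUpdateCoordinator[None]):
    """Illustrative coordinator: push stays primary, polling is a fallback."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry, client: Any) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name="letpot",
            # The new part: update_interval makes the coordinator poll
            # periodically in addition to the push updates it already receives.
            update_interval=timedelta(minutes=10),
        )
        self._client = client

    async def _async_update_data(self) -> None:
        # If this raises, entities become unavailable; the poll also picks up
        # pump state changes that the device never pushed.
        await self._client.request_status()  # hypothetical client call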

CODEOWNERS (generated)
View File

@@ -57,6 +57,8 @@ build.json @home-assistant/supervisor
/tests/components/aemet/ @Noltari
/homeassistant/components/agent_dvr/ @ispysoftware
/tests/components/agent_dvr/ @ispysoftware
/homeassistant/components/ai_task/ @home-assistant/core
/tests/components/ai_task/ @home-assistant/core
/homeassistant/components/air_quality/ @home-assistant/core
/tests/components/air_quality/ @home-assistant/core
/homeassistant/components/airgradient/ @airgradienthq @joostlek
@@ -1274,8 +1276,8 @@ build.json @home-assistant/supervisor
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555
/tests/components/remote_calendar/ @Thomas55555
/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
/tests/components/remote_calendar/ @Thomas55555 @allenporter
/homeassistant/components/renault/ @epenet
/tests/components/renault/ @epenet
/homeassistant/components/renson/ @jimmyd-be

View File

@@ -6,7 +6,7 @@ from jaraco.abode.exceptions import Exception as AbodeException
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
@@ -70,6 +70,7 @@ def _trigger_automation(call: ServiceCall) -> None:
dispatcher_send(call.hass, signal)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Home Assistant services."""

View File

@@ -0,0 +1,99 @@
"""Integration to offer AI tasks to Home Assistant."""
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, storage
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
from .const import DATA_COMPONENT, DATA_PREFERENCES, DOMAIN
from .entity import AITaskEntity
from .http import async_setup as async_setup_conversation_http
from .task import GenTextTask, GenTextTaskResult, async_generate_text
__all__ = [
"DOMAIN",
"AITaskEntity",
"GenTextTask",
"GenTextTaskResult",
"async_generate_text",
"async_setup",
"async_setup_entry",
"async_unload_entry",
]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Register the process service."""
entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
await hass.data[DATA_PREFERENCES].async_load()
async_setup_conversation_http(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
class AITaskPreferences:
"""AI Task preferences."""
gen_text_entity_id: str | None = None
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the preferences."""
self._store: storage.Store[dict[str, str | None]] = storage.Store(
hass, 1, DOMAIN
)
async def async_load(self) -> None:
"""Load the data from the store."""
data = await self._store.async_load()
if data is None:
return
self.gen_text_entity_id = data.get("gen_text_entity_id")
@callback
def async_set_preferences(
self,
*,
gen_text_entity_id: str | None | UndefinedType = UNDEFINED,
) -> None:
"""Set the preferences."""
changed = False
for key, value in (("gen_text_entity_id", gen_text_entity_id),):
if value is not UNDEFINED:
if getattr(self, key) != value:
setattr(self, key, value)
changed = True
if not changed:
return
self._store.async_delay_save(
lambda: {
"gen_text_entity_id": self.gen_text_entity_id,
},
10,
)
@callback
def as_dict(self) -> dict[str, str | None]:
"""Get the current preferences."""
return {
"gen_text_entity_id": self.gen_text_entity_id,
}
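
A short sketch of how the UNDEFINED sentinel in async_set_preferences above enables partial updates; the helper function and entity id are invented for illustration:

from homeassistant.components.ai_task.const import DATA_PREFERENCES
from homeassistant.core import HomeAssistant


def configure_default_task_entity(hass: HomeAssistant) -> None:
    """Hypothetical helper exercising AITaskPreferences."""
    prefs = hass.data[DATA_PREFERENCES]

    # Set the preferred gen-text entity; persisted via async_delay_save after 10 s.
    prefs.async_set_preferences(gen_text_entity_id="ai_task.example_llm")

    # Omitting the keyword leaves the stored value untouched, because the
    # default is the UNDEFINED sentinel rather than None.
    prefs.async_set_preferences()

    # Passing None explicitly clears the preference.
    prefs.async_set_preferences(gen_text_entity_id=None)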

View File

@@ -0,0 +1,21 @@
"""Constants for the AI Task integration."""
from __future__ import annotations
from typing import TYPE_CHECKING
from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from homeassistant.helpers.entity_component import EntityComponent
from . import AITaskPreferences
from .entity import AITaskEntity
DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DEFAULT_SYSTEM_PROMPT = (
"You are a Home Assistant expert and help users with their tasks."
)

View File

@@ -0,0 +1,95 @@
"""Entity for the AI Task integration."""
from collections.abc import AsyncGenerator
import contextlib
from typing import final
from homeassistant.components.conversation import (
ChatLog,
UserContent,
async_get_chat_log,
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util
from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN
from .task import GenTextTask, GenTextTaskResult
class AITaskEntity(RestoreEntity):
"""Entity that supports conversations."""
_attr_should_poll = False
__last_activity: str | None = None
@property
@final
def state(self) -> str | None:
"""Return the state of the entity."""
if self.__last_activity is None:
return None
return self.__last_activity
async def async_internal_added_to_hass(self) -> None:
"""Call when the entity is added to hass."""
await super().async_internal_added_to_hass()
state = await self.async_get_last_state()
if (
state is not None
and state.state is not None
and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
):
self.__last_activity = state.state
@final
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
task: GenTextTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_session(self.hass) as session,
async_get_chat_log(
self.hass,
session,
None,
) as chat_log,
):
await chat_log.async_provide_llm_data(
llm.LLMContext(
platform=self.platform.domain,
context=None,
language=None,
assistant=DOMAIN,
device_id=None,
),
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
)
chat_log.async_add_user_content(UserContent(task.instructions))
yield chat_log
@final
async def internal_async_generate_text(
self,
task: GenTextTask,
) -> GenTextTaskResult:
"""Run a gen text task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(task) as chat_log:
return await self._async_generate_text(task, chat_log)
async def _async_generate_text(
self,
task: GenTextTask,
chat_log: ChatLog,
) -> GenTextTaskResult:
"""Handle a gen text task."""
raise NotImplementedError
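
A platform would subclass AITaskEntity and implement _async_generate_text; a toy sketch under the assumption that the names exported in __init__.py above are importable from the component package (the echo behaviour is obviously illustrative):

from homeassistant.components.ai_task import AITaskEntity, GenTextTask, GenTextTaskResult
from homeassistant.components.conversation import ChatLog


class EchoTaskEntity(AITaskEntity):
    """Toy AI Task entity that 'generates' text by echoing the instructions."""

    _attr_name = "Echo task"

    async def _async_generate_text(
        self, task: GenTextTask, chat_log: ChatLog
    ) -> GenTextTaskResult:
        # A real implementation would hand chat_log.content to an LLM here.
        return GenTextTaskResult(
            conversation_id=chat_log.conversation_id,
            result=f"Echo: {task.instructions}",
        )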

View File

@@ -0,0 +1,82 @@
"""HTTP endpoint for AI Task integration."""
from typing import Any
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from .const import DATA_PREFERENCES
from .task import async_generate_text
@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up the HTTP API for the conversation integration."""
websocket_api.async_register_command(hass, websocket_generate_text)
websocket_api.async_register_command(hass, websocket_get_preferences)
websocket_api.async_register_command(hass, websocket_set_preferences)
@websocket_api.websocket_command(
{
vol.Required("type"): "ai_task/generate_text",
vol.Required("task_name"): str,
vol.Optional("entity_id"): str,
vol.Required("instructions"): str,
}
)
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_generate_text(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Run a generate text task."""
msg.pop("type")
msg_id = msg.pop("id")
try:
result = await async_generate_text(hass=hass, **msg)
except ValueError as err:
connection.send_error(msg_id, websocket_api.const.ERR_UNKNOWN_ERROR, str(err))
return
connection.send_result(msg_id, result.as_dict())
@websocket_api.websocket_command(
{
vol.Required("type"): "ai_task/preferences/get",
}
)
@callback
def websocket_get_preferences(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Get AI task preferences."""
preferences = hass.data[DATA_PREFERENCES]
connection.send_result(msg["id"], preferences.as_dict())
@websocket_api.websocket_command(
{
vol.Required("type"): "ai_task/preferences/set",
vol.Optional("gen_text_entity_id"): vol.Any(str, None),
}
)
@websocket_api.require_admin
@callback
def websocket_set_preferences(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Set AI task preferences."""
preferences = hass.data[DATA_PREFERENCES]
msg.pop("type")
msg_id = msg.pop("id")
preferences.async_set_preferences(**msg)
connection.send_result(msg_id, preferences.as_dict())
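
How the commands above could be exercised from a test, assuming the standard hass_ws_client fixture from Home Assistant's test suite (task name and instructions are placeholders):

async def test_generate_text(hass, hass_ws_client) -> None:
    """Sketch of driving the ai_task/generate_text WebSocket command."""
    client = await hass_ws_client(hass)

    await client.send_json_auto_id(
        {
            "type": "ai_task/generate_text",
            "task_name": "example_task",
            "instructions": "Summarize the state of the house.",
        }
    )
    msg = await client.receive_json()

    # On success the payload is GenTextTaskResult.as_dict(); a ValueError from
    # async_generate_text surfaces as an error response instead.
    assert msg["success"]
    assert "conversation_id" in msg["result"]
    assert "result" in msg["result"]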

View File

@@ -0,0 +1,9 @@
{
"domain": "ai_task",
"name": "AI Task",
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "system",
"quality_scale": "internal"
}

View File

@@ -0,0 +1,68 @@
"""AI tasks to be handled by agents."""
from __future__ import annotations
from dataclasses import dataclass
from homeassistant.core import HomeAssistant
from .const import DATA_COMPONENT, DATA_PREFERENCES
async def async_generate_text(
hass: HomeAssistant,
*,
task_name: str,
entity_id: str | None = None,
instructions: str,
) -> GenTextTaskResult:
"""Run a task in the AI Task integration."""
if entity_id is None:
entity_id = hass.data[DATA_PREFERENCES].gen_text_entity_id
if entity_id is None:
raise ValueError("No entity_id provided and no preferred entity set")
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
if entity is None:
raise ValueError(f"AI Task entity {entity_id} not found")
return await entity.internal_async_generate_text(
GenTextTask(
name=task_name,
instructions=instructions,
)
)
@dataclass(slots=True)
class GenTextTask:
"""Gen text task to be processed."""
name: str
"""Name of the task."""
instructions: str
"""Instructions on what needs to be done."""
def __str__(self) -> str:
"""Return task as a string."""
return f"<GenTextTask {self.name}: {id(self)}>"
@dataclass(slots=True)
class GenTextTaskResult:
"""Result of gen text task."""
conversation_id: str
"""Unique identifier for the conversation."""
result: str
"""Result of the task."""
def as_dict(self) -> dict[str, str]:
"""Return result as a dict."""
return {
"conversation_id": self.conversation_id,
"result": self.result,
}
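
Calling the helper from Python follows directly from the signature above; a small illustrative caller (the task name and instructions are placeholders):

from homeassistant.components.ai_task import async_generate_text
from homeassistant.core import HomeAssistant


async def summarize_house(hass: HomeAssistant) -> str:
    """Hypothetical caller of async_generate_text."""
    # entity_id is omitted, so the preferred gen_text_entity_id is used;
    # a ValueError is raised if neither is set.
    result = await async_generate_text(
        hass,
        task_name="summarize_house",
        instructions="Summarize the state of the house in one sentence.",
    )
    return result.result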

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "bronze",
"requirements": ["aioamazondevices==3.0.6"]
"requirements": ["aioamazondevices==3.1.4"]
}

View File

@@ -7,6 +7,7 @@ from dataclasses import dataclass
from typing import Any, Final
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant
@@ -22,6 +23,7 @@ PARALLEL_UPDATES = 1
class AmazonNotifyEntityDescription(NotifyEntityDescription):
"""Alexa Devices notify entity description."""
is_supported: Callable[[AmazonDevice], bool] = lambda _device: True
method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
subkey: str
@@ -31,6 +33,7 @@ NOTIFY: Final = (
key="speak",
translation_key="speak",
subkey="AUDIO_PLAYER",
is_supported=lambda _device: _device.device_family != SPEAKER_GROUP_FAMILY,
method=lambda api, device, message: api.call_alexa_speak(device, message),
),
AmazonNotifyEntityDescription(
@@ -58,6 +61,7 @@ async def async_setup_entry(
for sensor_desc in NOTIFY
for serial_num in coordinator.data
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
and sensor_desc.is_supported(coordinator.data[serial_num])
)

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from homeassistant.auth.models import User
from homeassistant.auth.permissions.const import POLICY_CONTROL
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import Unauthorized, UnknownUser
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.service import async_extract_entity_ids
@@ -15,6 +15,7 @@ from .const import CAMERAS, DATA_AMCREST, DOMAIN
from .helpers import service_signal
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the Amcrest IP Camera services."""

View File

@@ -366,15 +366,35 @@ class AnthropicConversationEntity(
options = self.entry.options
try:
await chat_log.async_update_llm_data(
DOMAIN,
user_input,
await chat_log.async_provide_llm_data(
user_input.as_llm_context(DOMAIN),
options.get(CONF_LLM_HASS_API),
options.get(CONF_PROMPT),
user_input.extra_system_prompt,
)
except conversation.ConverseError as err:
return err.as_conversation_result()
await self._async_handle_chat_log(chat_log)
response_content = chat_log.content[-1]
if not isinstance(response_content, conversation.AssistantContent):
raise TypeError("Last message must be an assistant message")
intent_response = intent.IntentResponse(language=user_input.language)
intent_response.async_set_speech(response_content.content or "")
return conversation.ConversationResult(
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
) -> None:
"""Generate an answer for the chat log."""
options = self.entry.options
tools: list[ToolParam] | None = None
if chat_log.llm_api:
tools = [
@@ -424,7 +444,7 @@ class AnthropicConversationEntity(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id,
self.entity_id,
_transform_stream(chat_log, stream, messages),
)
if not isinstance(content, conversation.AssistantContent)
@@ -435,17 +455,6 @@ class AnthropicConversationEntity(
if not chat_log.unresponded_tool_results:
break
response_content = chat_log.content[-1]
if not isinstance(response_content, conversation.AssistantContent):
raise TypeError("Last message must be an assistant message")
intent_response = intent.IntentResponse(language=user_input.language)
intent_response.async_set_speech(response_content.content or "")
return conversation.ConversationResult(
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
async def _async_entry_update_listener(
self, hass: HomeAssistant, entry: ConfigEntry
) -> None:

View File

@@ -89,7 +89,7 @@ class ArubaDeviceScanner(DeviceScanner):
def get_aruba_data(self) -> dict[str, dict[str, str]] | None:
"""Retrieve data from Aruba Access Point and return parsed result."""
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
connect = f"ssh {self.username}@{self.host}"
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
query = ssh.expect(
[

View File

@@ -14,12 +14,11 @@ import voluptuous as vol
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, intent, llm, template
from homeassistant.helpers import chat_session, frame, intent, llm, template
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType
from . import trace
from .const import DOMAIN
from .models import ConversationInput, ConversationResult
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
@@ -359,7 +358,7 @@ class ChatLog:
self,
llm_context: llm.LLMContext,
prompt: str,
language: str,
language: str | None,
user_name: str | None = None,
) -> str:
try:
@@ -373,7 +372,7 @@ class ChatLog:
)
except TemplateError as err:
LOGGER.error("Error rendering prompt: %s", err)
intent_response = intent.IntentResponse(language=language)
intent_response = intent.IntentResponse(language=language or "")
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
"Sorry, I had a problem with my template",
@@ -392,15 +391,25 @@ class ChatLog:
user_llm_prompt: str | None = None,
) -> None:
"""Set the LLM system prompt."""
llm_context = llm.LLMContext(
platform=conversing_domain,
context=user_input.context,
user_prompt=user_input.text,
language=user_input.language,
assistant=DOMAIN,
device_id=user_input.device_id,
frame.report_usage(
"ChatLog.async_update_llm_data",
breaks_in_ha_version="2026.1",
)
return await self.async_provide_llm_data(
llm_context=user_input.as_llm_context(conversing_domain),
user_llm_hass_api=user_llm_hass_api,
user_llm_prompt=user_llm_prompt,
user_extra_system_prompt=user_input.extra_system_prompt,
)
async def async_provide_llm_data(
self,
llm_context: llm.LLMContext,
user_llm_hass_api: str | list[str] | None = None,
user_llm_prompt: str | None = None,
user_extra_system_prompt: str | None = None,
) -> None:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None
if user_llm_hass_api:
@@ -414,10 +423,12 @@ class ChatLog:
LOGGER.error(
"Error getting LLM API %s for %s: %s",
user_llm_hass_api,
conversing_domain,
llm_context.platform,
err,
)
intent_response = intent.IntentResponse(language=user_input.language)
intent_response = intent.IntentResponse(
language=llm_context.language or ""
)
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
"Error preparing LLM API",
@@ -431,10 +442,10 @@ class ChatLog:
user_name: str | None = None
if (
user_input.context
and user_input.context.user_id
llm_context.context
and llm_context.context.user_id
and (
user := await self.hass.auth.async_get_user(user_input.context.user_id)
user := await self.hass.auth.async_get_user(llm_context.context.user_id)
)
):
user_name = user.name
@@ -444,7 +455,7 @@ class ChatLog:
await self._async_expand_prompt_template(
llm_context,
(user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
user_input.language,
llm_context.language,
user_name,
)
)
@@ -456,14 +467,14 @@ class ChatLog:
await self._async_expand_prompt_template(
llm_context,
llm.BASE_PROMPT,
user_input.language,
llm_context.language,
user_name,
)
)
if extra_system_prompt := (
# Take new system prompt if one was given
user_input.extra_system_prompt or self.extra_system_prompt
user_extra_system_prompt or self.extra_system_prompt
):
prompt_parts.append(extra_system_prompt)
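
Read together with the Anthropic and Google conversation diffs, the migration for a conversation agent amounts to swapping one call for the other (options keys vary per integration):

# Deprecated path, now routed through frame.report_usage and slated for 2026.1:
await chat_log.async_update_llm_data(
    DOMAIN,
    user_input,
    options.get(CONF_LLM_HASS_API),
    options.get(CONF_PROMPT),
    user_input.extra_system_prompt,
)

# Replacement: the caller builds the LLMContext itself and passes the extra
# system prompt explicitly, so ChatLog no longer needs the ConversationInput.
await chat_log.async_provide_llm_data(
    user_input.as_llm_context(DOMAIN),
    options.get(CONF_LLM_HASS_API),
    options.get(CONF_PROMPT),
    user_input.extra_system_prompt,
)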

View File

@@ -7,7 +7,9 @@ from dataclasses import dataclass
from typing import Any, Literal
from homeassistant.core import Context
from homeassistant.helpers import intent
from homeassistant.helpers import intent, llm
from .const import DOMAIN
@dataclass(frozen=True)
@@ -56,6 +58,16 @@ class ConversationInput:
"extra_system_prompt": self.extra_system_prompt,
}
def as_llm_context(self, conversing_domain: str) -> llm.LLMContext:
"""Return input as an LLM context."""
return llm.LLMContext(
platform=conversing_domain,
context=self.context,
language=self.language,
assistant=DOMAIN,
device_id=self.device_id,
)
@dataclass(slots=True)
class ConversationResult:

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.4.0"]
"requirements": ["aiodns==3.5.0"]
}

View File

@@ -10,7 +10,7 @@ import threading
import requests
import voluptuous as vol
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.util import raise_if_invalid_filename, raise_if_invalid_path
@@ -141,6 +141,7 @@ def download_file(service: ServiceCall) -> None:
threading.Thread(target=do_download).start()
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the services for the downloader component."""
async_register_admin_service(

View File

@@ -63,6 +63,7 @@ def _set_time_service(service: ServiceCall) -> None:
_async_get_elk_panel(service).set_time(dt_util.now())
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Create ElkM1 services."""

View File

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING
from eq3btsmart import Thermostat
from eq3btsmart.exceptions import Eq3Exception
from eq3btsmart.thermostat_config import ThermostatConfig
from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
@@ -53,12 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
f"[{eq3_config.mac_address}] Device could not be found"
)
thermostat = Thermostat(
thermostat_config=ThermostatConfig(
mac_address=mac_address,
),
ble_device=device,
)
thermostat = Thermostat(mac_address=device) # type: ignore[arg-type]
entry.runtime_data = Eq3ConfigEntryData(
eq3_config=eq3_config, thermostat=thermostat

View File

@@ -2,7 +2,6 @@
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING
from eq3btsmart.models import Status
@@ -80,7 +79,4 @@ class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
def is_on(self) -> bool:
"""Return the state of the binary sensor."""
if TYPE_CHECKING:
assert self._thermostat.status is not None
return self.entity_description.value_func(self._thermostat.status)

View File

@@ -1,9 +1,16 @@
"""Platform for eQ-3 climate entities."""
from datetime import timedelta
import logging
from typing import Any
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
from eq3btsmart.const import (
EQ3_DEFAULT_AWAY_TEMP,
EQ3_MAX_TEMP,
EQ3_OFF_TEMP,
Eq3OperationMode,
Eq3Preset,
)
from eq3btsmart.exceptions import Eq3Exception
from homeassistant.components.climate import (
@@ -20,9 +27,11 @@ from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.util.dt as dt_util
from . import Eq3ConfigEntry
from .const import (
DEFAULT_AWAY_HOURS,
EQ_TO_HA_HVAC,
HA_TO_EQ_HVAC,
CurrentTemperatureSelector,
@@ -57,8 +66,8 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
| ClimateEntityFeature.TURN_ON
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_min_temp = EQ3BT_OFF_TEMP
_attr_max_temp = EQ3BT_MAX_TEMP
_attr_min_temp = EQ3_OFF_TEMP
_attr_max_temp = EQ3_MAX_TEMP
_attr_precision = PRECISION_HALVES
_attr_hvac_modes = list(HA_TO_EQ_HVAC.keys())
_attr_preset_modes = list(Preset)
@@ -70,38 +79,21 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
_target_temperature: float | None = None
@callback
def _async_on_updated(self) -> None:
"""Handle updated data from the thermostat."""
if self._thermostat.status is not None:
self._async_on_status_updated()
if self._thermostat.device_data is not None:
self._async_on_device_updated()
super()._async_on_updated()
@callback
def _async_on_status_updated(self) -> None:
def _async_on_status_updated(self, data: Any) -> None:
"""Handle updated status from the thermostat."""
if self._thermostat.status is None:
return
self._target_temperature = self._thermostat.status.target_temperature.value
self._target_temperature = self._thermostat.status.target_temperature
self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
self._attr_current_temperature = self._get_current_temperature()
self._attr_target_temperature = self._get_target_temperature()
self._attr_preset_mode = self._get_current_preset_mode()
self._attr_hvac_action = self._get_current_hvac_action()
super()._async_on_status_updated(data)
@callback
def _async_on_device_updated(self) -> None:
def _async_on_device_updated(self, data: Any) -> None:
"""Handle updated device data from the thermostat."""
if self._thermostat.device_data is None:
return
device_registry = dr.async_get(self.hass)
if device := device_registry.async_get_device(
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
@@ -109,8 +101,9 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
device_registry.async_update_device(
device.id,
sw_version=str(self._thermostat.device_data.firmware_version),
serial_number=self._thermostat.device_data.device_serial.value,
serial_number=self._thermostat.device_data.device_serial,
)
super()._async_on_device_updated(data)
def _get_current_temperature(self) -> float | None:
"""Return the current temperature."""
@@ -119,17 +112,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case CurrentTemperatureSelector.NOTHING:
return None
case CurrentTemperatureSelector.VALVE:
if self._thermostat.status is None:
return None
return float(self._thermostat.status.valve_temperature)
case CurrentTemperatureSelector.UI:
return self._target_temperature
case CurrentTemperatureSelector.DEVICE:
if self._thermostat.status is None:
return None
return float(self._thermostat.status.target_temperature.value)
return float(self._thermostat.status.target_temperature)
case CurrentTemperatureSelector.ENTITY:
state = self.hass.states.get(self._eq3_config.external_temp_sensor)
if state is not None:
@@ -147,16 +134,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case TargetTemperatureSelector.TARGET:
return self._target_temperature
case TargetTemperatureSelector.LAST_REPORTED:
if self._thermostat.status is None:
return None
return float(self._thermostat.status.target_temperature.value)
return float(self._thermostat.status.target_temperature)
def _get_current_preset_mode(self) -> str:
"""Return the current preset mode."""
if (status := self._thermostat.status) is None:
return PRESET_NONE
status = self._thermostat.status
if status.is_window_open:
return Preset.WINDOW_OPEN
if status.is_boost:
@@ -165,7 +148,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
return Preset.LOW_BATTERY
if status.is_away:
return Preset.AWAY
if status.operation_mode is OperationMode.ON:
if status.operation_mode is Eq3OperationMode.ON:
return Preset.OPEN
if status.presets is None:
return PRESET_NONE
@@ -179,10 +162,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
def _get_current_hvac_action(self) -> HVACAction:
"""Return the current hvac action."""
if (
self._thermostat.status is None
or self._thermostat.status.operation_mode is OperationMode.OFF
):
if self._thermostat.status.operation_mode is Eq3OperationMode.OFF:
return HVACAction.OFF
if self._thermostat.status.valve == 0:
return HVACAction.IDLE
@@ -227,7 +207,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
"""Set new target hvac mode."""
if hvac_mode is HVACMode.OFF:
await self.async_set_temperature(temperature=EQ3BT_OFF_TEMP)
await self.async_set_temperature(temperature=EQ3_OFF_TEMP)
try:
await self._thermostat.async_set_mode(HA_TO_EQ_HVAC[hvac_mode])
@@ -241,10 +221,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case Preset.BOOST:
await self._thermostat.async_set_boost(True)
case Preset.AWAY:
await self._thermostat.async_set_away(True)
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
await self._thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
case Preset.ECO:
await self._thermostat.async_set_preset(Eq3Preset.ECO)
case Preset.COMFORT:
await self._thermostat.async_set_preset(Eq3Preset.COMFORT)
case Preset.OPEN:
await self._thermostat.async_set_mode(OperationMode.ON)
await self._thermostat.async_set_mode(Eq3OperationMode.ON)

View File

@@ -2,7 +2,7 @@
from enum import Enum
from eq3btsmart.const import OperationMode
from eq3btsmart.const import Eq3OperationMode
from homeassistant.components.climate import (
PRESET_AWAY,
@@ -34,17 +34,17 @@ ENTITY_KEY_AWAY_UNTIL = "away_until"
GET_DEVICE_TIMEOUT = 5 # seconds
EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
OperationMode.OFF: HVACMode.OFF,
OperationMode.ON: HVACMode.HEAT,
OperationMode.AUTO: HVACMode.AUTO,
OperationMode.MANUAL: HVACMode.HEAT,
EQ_TO_HA_HVAC: dict[Eq3OperationMode, HVACMode] = {
Eq3OperationMode.OFF: HVACMode.OFF,
Eq3OperationMode.ON: HVACMode.HEAT,
Eq3OperationMode.AUTO: HVACMode.AUTO,
Eq3OperationMode.MANUAL: HVACMode.HEAT,
}
HA_TO_EQ_HVAC = {
HVACMode.OFF: OperationMode.OFF,
HVACMode.AUTO: OperationMode.AUTO,
HVACMode.HEAT: OperationMode.MANUAL,
HVACMode.OFF: Eq3OperationMode.OFF,
HVACMode.AUTO: Eq3OperationMode.AUTO,
HVACMode.HEAT: Eq3OperationMode.MANUAL,
}
@@ -81,6 +81,7 @@ class TargetTemperatureSelector(str, Enum):
DEFAULT_CURRENT_TEMP_SELECTOR = CurrentTemperatureSelector.DEVICE
DEFAULT_TARGET_TEMP_SELECTOR = TargetTemperatureSelector.TARGET
DEFAULT_SCAN_INTERVAL = 10 # seconds
DEFAULT_AWAY_HOURS = 30 * 24
SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"

View File

@@ -1,5 +1,10 @@
"""Base class for all eQ-3 entities."""
from typing import Any
from eq3btsmart import Eq3Exception
from eq3btsmart.const import Eq3Event
from homeassistant.core import callback
from homeassistant.helpers.device_registry import (
CONNECTION_BLUETOOTH,
@@ -45,7 +50,15 @@ class Eq3Entity(Entity):
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
self._thermostat.register_update_callback(self._async_on_updated)
self._thermostat.register_callback(
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
)
self._thermostat.register_callback(
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
)
self._thermostat.register_callback(
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
)
self.async_on_remove(
async_dispatcher_connect(
@@ -65,10 +78,25 @@ class Eq3Entity(Entity):
async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
self._thermostat.unregister_update_callback(self._async_on_updated)
self._thermostat.unregister_callback(
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
)
self._thermostat.unregister_callback(
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
)
self._thermostat.unregister_callback(
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
)
def _async_on_updated(self) -> None:
"""Handle updated data from the thermostat."""
@callback
def _async_on_status_updated(self, data: Any) -> None:
"""Handle updated status from the thermostat."""
self.async_write_ha_state()
@callback
def _async_on_device_updated(self, data: Any) -> None:
"""Handle updated device data from the thermostat."""
self.async_write_ha_state()
@@ -90,4 +118,9 @@ class Eq3Entity(Entity):
def available(self) -> bool:
"""Whether the entity is available."""
return self._thermostat.status is not None and self._attr_available
try:
_ = self._thermostat.status
except Eq3Exception:
return False
return self._attr_available

View File

@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.16.0"]
"requirements": ["eq3btsmart==2.1.0", "bleak-esphome==2.16.0"]
}

View File

@@ -1,17 +1,12 @@
"""Platform for eq3 number entities."""
from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import TYPE_CHECKING
from eq3btsmart import Thermostat
from eq3btsmart.const import (
EQ3BT_MAX_OFFSET,
EQ3BT_MAX_TEMP,
EQ3BT_MIN_OFFSET,
EQ3BT_MIN_TEMP,
)
from eq3btsmart.models import Presets
from eq3btsmart.const import EQ3_MAX_OFFSET, EQ3_MAX_TEMP, EQ3_MIN_OFFSET, EQ3_MIN_TEMP
from eq3btsmart.models import Presets, Status
from homeassistant.components.number import (
NumberDeviceClass,
@@ -42,7 +37,7 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
value_func: Callable[[Presets], float]
value_set_func: Callable[
[Thermostat],
Callable[[float], Awaitable[None]],
Callable[[float], Coroutine[None, None, Status]],
]
mode: NumberMode = NumberMode.BOX
entity_category: EntityCategory | None = EntityCategory.CONFIG
@@ -51,44 +46,44 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
NUMBER_ENTITY_DESCRIPTIONS = [
Eq3NumberEntityDescription(
key=ENTITY_KEY_COMFORT,
value_func=lambda presets: presets.comfort_temperature.value,
value_func=lambda presets: presets.comfort_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
translation_key=ENTITY_KEY_COMFORT,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_ECO,
value_func=lambda presets: presets.eco_temperature.value,
value_func=lambda presets: presets.eco_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
translation_key=ENTITY_KEY_ECO,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
value_func=lambda presets: presets.window_open_temperature.value,
value_func=lambda presets: presets.window_open_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_OFFSET,
value_func=lambda presets: presets.offset_temperature.value,
value_func=lambda presets: presets.offset_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
translation_key=ENTITY_KEY_OFFSET,
native_min_value=EQ3BT_MIN_OFFSET,
native_max_value=EQ3BT_MAX_OFFSET,
native_min_value=EQ3_MIN_OFFSET,
native_max_value=EQ3_MAX_OFFSET,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
@@ -96,7 +91,7 @@ NUMBER_ENTITY_DESCRIPTIONS = [
Eq3NumberEntityDescription(
key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
value_func=lambda presets: presets.window_open_time.total_seconds() / 60,
translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
native_min_value=0,
native_max_value=60,
@@ -137,7 +132,6 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
"""Return the state of the entity."""
if TYPE_CHECKING:
assert self._thermostat.status is not None
assert self._thermostat.status.presets is not None
return self.entity_description.value_func(self._thermostat.status.presets)
@@ -152,7 +146,7 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
"""Return whether the entity is available."""
return (
self._thermostat.status is not None
super().available
and self._thermostat.status.presets is not None
and self._attr_available
)

View File

@@ -1,12 +1,12 @@
"""Voluptuous schemas for eq3btsmart."""
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_MIN_TEMP
from eq3btsmart.const import EQ3_MAX_TEMP, EQ3_MIN_TEMP
import voluptuous as vol
from homeassistant.const import CONF_MAC
from homeassistant.helpers import config_validation as cv
SCHEMA_TEMPERATURE = vol.Range(min=EQ3BT_MIN_TEMP, max=EQ3BT_MAX_TEMP)
SCHEMA_TEMPERATURE = vol.Range(min=EQ3_MIN_TEMP, max=EQ3_MAX_TEMP)
SCHEMA_DEVICE = vol.Schema({vol.Required(CONF_MAC): cv.string})
SCHEMA_MAC = vol.Schema(
{

View File

@@ -3,7 +3,6 @@
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from typing import TYPE_CHECKING
from eq3btsmart.models import Status
@@ -40,9 +39,7 @@ SENSOR_ENTITY_DESCRIPTIONS = [
Eq3SensorEntityDescription(
key=ENTITY_KEY_AWAY_UNTIL,
translation_key=ENTITY_KEY_AWAY_UNTIL,
value_func=lambda status: (
status.away_until.value if status.away_until else None
),
value_func=lambda status: (status.away_until if status.away_until else None),
device_class=SensorDeviceClass.DATE,
),
]
@@ -78,7 +75,4 @@ class Eq3SensorEntity(Eq3Entity, SensorEntity):
def native_value(self) -> int | datetime | None:
"""Return the value reported by the sensor."""
if TYPE_CHECKING:
assert self._thermostat.status is not None
return self.entity_description.value_func(self._thermostat.status)

View File

@@ -1,26 +1,45 @@
"""Platform for eq3 switch entities."""
from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any
from datetime import timedelta
from functools import partial
from typing import Any
from eq3btsmart import Thermostat
from eq3btsmart.const import EQ3_DEFAULT_AWAY_TEMP, Eq3OperationMode
from eq3btsmart.models import Status
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.util.dt as dt_util
from . import Eq3ConfigEntry
from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
from .const import (
DEFAULT_AWAY_HOURS,
ENTITY_KEY_AWAY,
ENTITY_KEY_BOOST,
ENTITY_KEY_LOCK,
)
from .entity import Eq3Entity
async def async_set_away(thermostat: Thermostat, enable: bool) -> Status:
"""Backport old async_set_away behavior."""
if not enable:
return await thermostat.async_set_mode(Eq3OperationMode.AUTO)
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
return await thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
@dataclass(frozen=True, kw_only=True)
class Eq3SwitchEntityDescription(SwitchEntityDescription):
"""Entity description for eq3 switch entities."""
toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
toggle_func: Callable[[Thermostat], Callable[[bool], Coroutine[None, None, Status]]]
value_func: Callable[[Status], bool]
@@ -40,7 +59,7 @@ SWITCH_ENTITY_DESCRIPTIONS = [
Eq3SwitchEntityDescription(
key=ENTITY_KEY_AWAY,
translation_key=ENTITY_KEY_AWAY,
toggle_func=lambda thermostat: thermostat.async_set_away,
toggle_func=lambda thermostat: partial(async_set_away, thermostat),
value_func=lambda status: status.is_away,
),
]
@@ -88,7 +107,4 @@ class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
def is_on(self) -> bool:
"""Return the state of the switch."""
if TYPE_CHECKING:
assert self._thermostat.status is not None
return self.entity_description.value_func(self._thermostat.status)

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -35,6 +35,7 @@ async def _async_service_handle(service: ServiceCall) -> None:
async_dispatcher_send(service.hass, SIGNAL_FFMPEG_RESTART, entity_ids)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register FFmpeg services."""

View File

@@ -11,6 +11,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.helpers import config_validation as cv
@@ -49,6 +50,7 @@ async def _send_text_command(call: ServiceCall) -> ServiceResponse:
return None
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Add the services for Google Assistant SDK."""

View File

@@ -45,7 +45,10 @@ CONF_IMAGE_FILENAME = "image_filename"
CONF_FILENAMES = "filenames"
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = (Platform.CONVERSATION,)
PLATFORMS = (
Platform.CONVERSATION,
Platform.TTS,
)
type GoogleGenerativeAIConfigEntry = ConfigEntry[Client]

View File

@@ -6,9 +6,11 @@ DOMAIN = "google_generative_ai_conversation"
LOGGER = logging.getLogger(__package__)
CONF_PROMPT = "prompt"
ATTR_MODEL = "model"
CONF_RECOMMENDED = "recommended"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"

View File

@@ -2,63 +2,18 @@
from __future__ import annotations
import codecs
from collections.abc import AsyncGenerator, Callable
from dataclasses import replace
from typing import Any, Literal, cast
from google.genai.errors import APIError, ClientError
from google.genai.types import (
AutomaticFunctionCallingConfig,
Content,
FunctionDeclaration,
GenerateContentConfig,
GenerateContentResponse,
GoogleSearch,
HarmCategory,
Part,
SafetySetting,
Schema,
Tool,
)
from voluptuous_openapi import convert
from typing import Literal
from homeassistant.components import assist_pipeline, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, intent, llm
from homeassistant.helpers import intent
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
CONF_CHAT_MODEL,
CONF_DANGEROUS_BLOCK_THRESHOLD,
CONF_HARASSMENT_BLOCK_THRESHOLD,
CONF_HATE_BLOCK_THRESHOLD,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_SEXUAL_BLOCK_THRESHOLD,
CONF_TEMPERATURE,
CONF_TOP_K,
CONF_TOP_P,
CONF_USE_GOOGLE_SEARCH_TOOL,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_K,
RECOMMENDED_TOP_P,
)
# Max number of back and forth with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10
ERROR_GETTING_RESPONSE = (
"Sorry, I had a problem getting a response from Google Generative AI."
)
from .const import CONF_PROMPT, DOMAIN, LOGGER
from .entity import ERROR_GETTING_RESPONSE, GoogleGenerativeAILLMBaseEntity
async def async_setup_entry(
@@ -71,265 +26,18 @@ async def async_setup_entry(
async_add_entities([agent])
SUPPORTED_SCHEMA_KEYS = {
# Gemini API does not support all of the OpenAPI schema
# SoT: https://ai.google.dev/api/caching#Schema
"type",
"format",
"description",
"nullable",
"enum",
"max_items",
"min_items",
"properties",
"required",
"items",
}
def _camel_to_snake(name: str) -> str:
"""Convert camel case to snake case."""
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
def _format_schema(schema: dict[str, Any]) -> Schema:
"""Format the schema to be compatible with Gemini API."""
if subschemas := schema.get("allOf"):
for subschema in subschemas: # Gemini API does not support allOf keys
if "type" in subschema: # Fallback to first subschema with 'type' field
return _format_schema(subschema)
return _format_schema(
subschemas[0]
) # Or, if not found, to any of the subschemas
result = {}
for key, val in schema.items():
key = _camel_to_snake(key)
if key not in SUPPORTED_SCHEMA_KEYS:
continue
if key == "type":
val = val.upper()
elif key == "format":
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
# formats that are not supported are ignored
if schema.get("type") == "string" and val not in ("enum", "date-time"):
continue
if schema.get("type") == "number" and val not in ("float", "double"):
continue
if schema.get("type") == "integer" and val not in ("int32", "int64"):
continue
if schema.get("type") not in ("string", "number", "integer"):
continue
elif key == "items":
val = _format_schema(val)
elif key == "properties":
val = {k: _format_schema(v) for k, v in val.items()}
result[key] = val
if result.get("enum") and result.get("type") != "STRING":
# enum is only allowed for STRING type. This is safe as long as the schema
# contains vol.Coerce for the respective type, for example:
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
result["type"] = "STRING"
result["enum"] = [str(item) for item in result["enum"]]
if result.get("type") == "OBJECT" and not result.get("properties"):
# An object with undefined properties is not supported by Gemini API.
# Fallback to JSON string. This will probably fail for most tools that want it,
# but we don't have a better fallback strategy so far.
result["properties"] = {"json": {"type": "STRING"}}
result["required"] = []
return cast(Schema, result)
def _format_tool(
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> Tool:
"""Format tool specification."""
if tool.parameters.schema:
parameters = _format_schema(
convert(tool.parameters, custom_serializer=custom_serializer)
)
else:
parameters = None
return Tool(
function_declarations=[
FunctionDeclaration(
name=tool.name,
description=tool.description,
parameters=parameters,
)
]
)
def _escape_decode(value: Any) -> Any:
"""Recursively call codecs.escape_decode on all values."""
if isinstance(value, str):
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
if isinstance(value, list):
return [_escape_decode(item) for item in value]
if isinstance(value, dict):
return {k: _escape_decode(v) for k, v in value.items()}
return value
def _create_google_tool_response_parts(
parts: list[conversation.ToolResultContent],
) -> list[Part]:
"""Create Google tool response parts."""
return [
Part.from_function_response(
name=tool_result.tool_name, response=tool_result.tool_result
)
for tool_result in parts
]
def _create_google_tool_response_content(
content: list[conversation.ToolResultContent],
) -> Content:
"""Create a Google tool response content."""
return Content(
role="user",
parts=_create_google_tool_response_parts(content),
)
def _convert_content(
content: conversation.UserContent
| conversation.AssistantContent
| conversation.SystemContent,
) -> Content:
"""Convert HA content to Google content."""
if content.role != "assistant" or not content.tool_calls:
role = "model" if content.role == "assistant" else content.role
return Content(
role=role,
parts=[
Part.from_text(text=content.content if content.content else ""),
],
)
# Handle the Assistant content with tool calls.
assert type(content) is conversation.AssistantContent
parts: list[Part] = []
if content.content:
parts.append(Part.from_text(text=content.content))
if content.tool_calls:
parts.extend(
[
Part.from_function_call(
name=tool_call.tool_name,
args=_escape_decode(tool_call.tool_args),
)
for tool_call in content.tool_calls
]
)
return Content(role="model", parts=parts)
async def _transform_stream(
result: AsyncGenerator[GenerateContentResponse],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
new_message = True
try:
async for response in result:
LOGGER.debug("Received response chunk: %s", response)
chunk: conversation.AssistantContentDeltaDict = {}
if new_message:
chunk["role"] = "assistant"
new_message = False
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
if response.prompt_feedback or not response.candidates:
reason = (
response.prompt_feedback.block_reason_message
if response.prompt_feedback
else "unknown"
)
raise HomeAssistantError(
f"The message got blocked due to content violations, reason: {reason}"
)
candidate = response.candidates[0]
if (
candidate.finish_reason is not None
and candidate.finish_reason != "STOP"
):
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
LOGGER.error(
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
candidate.finish_reason,
)
raise HomeAssistantError(
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
)
response_parts = (
candidate.content.parts
if candidate.content is not None and candidate.content.parts is not None
else []
)
content = "".join([part.text for part in response_parts if part.text])
tool_calls = []
for part in response_parts:
if not part.function_call:
continue
tool_call = part.function_call
tool_name = tool_call.name if tool_call.name else ""
tool_args = _escape_decode(tool_call.args)
tool_calls.append(
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
)
if tool_calls:
chunk["tool_calls"] = tool_calls
chunk["content"] = content
yield chunk
except (
APIError,
ValueError,
) as err:
LOGGER.error("Error sending message: %s %s", type(err), err)
if isinstance(err, APIError):
message = err.message
else:
message = type(err).__name__
error = f"{ERROR_GETTING_RESPONSE}: {message}"
raise HomeAssistantError(error) from err
class GoogleGenerativeAIConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
conversation.ConversationEntity,
conversation.AbstractConversationAgent,
GoogleGenerativeAILLMBaseEntity,
):
"""Google Generative AI conversation agent."""
_attr_has_entity_name = True
_attr_name = None
_attr_supports_streaming = True
def __init__(self, entry: ConfigEntry) -> None:
"""Initialize the agent."""
self.entry = entry
self._genai_client = entry.runtime_data
self._attr_unique_id = entry.entry_id
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
name=entry.title,
manufacturer="Google",
model="Generative AI",
entry_type=dr.DeviceEntryType.SERVICE,
)
super().__init__(entry)
if self.entry.options.get(CONF_LLM_HASS_API):
self._attr_supported_features = (
conversation.ConversationEntityFeature.CONTROL
@@ -356,13 +64,6 @@ class GoogleGenerativeAIConversationEntity(
conversation.async_unset_agent(self.hass, self.entry)
await super().async_will_remove_from_hass()
def _fix_tool_name(self, tool_name: str) -> str:
"""Fix tool name if needed."""
        # The Gemini 2.0+ tokenizer seemingly has an issue with the HassListAddItem tool
        # name. This makes sure that when it incorrectly changes the name, we change it
        # back for HA to call.
return tool_name if tool_name != "HasListAddItem" else "HassListAddItem"
async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
@@ -372,162 +73,16 @@ class GoogleGenerativeAIConversationEntity(
options = self.entry.options
try:
await chat_log.async_update_llm_data(
DOMAIN,
user_input,
await chat_log.async_provide_llm_data(
user_input.as_llm_context(DOMAIN),
options.get(CONF_LLM_HASS_API),
options.get(CONF_PROMPT),
user_input.extra_system_prompt,
)
except conversation.ConverseError as err:
return err.as_conversation_result()
tools: list[Tool | Callable[..., Any]] | None = None
if chat_log.llm_api:
tools = [
_format_tool(tool, chat_log.llm_api.custom_serializer)
for tool in chat_log.llm_api.tools
]
        # Using search grounding allows the model to retrieve information from the web.
        # However, it may interfere with how the model decides to use some tools or entities;
        # for example, a weather entity may be disregarded if the model chooses to Google the weather instead.
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemma" not in model_name
and "gemini-2.0-flash-preview-image-generation" not in model_name
)
prompt_content = cast(
conversation.SystemContent,
chat_log.content[0],
)
if prompt_content.content:
prompt = prompt_content.content
else:
raise HomeAssistantError("Invalid prompt content")
messages: list[Content] = []
# Google groups tool results, we do not. Group them before sending.
tool_results: list[conversation.ToolResultContent] = []
for chat_content in chat_log.content[1:-1]:
if chat_content.role == "tool_result":
tool_results.append(chat_content)
continue
if (
not isinstance(chat_content, conversation.ToolResultContent)
and chat_content.content == ""
):
                # Skipping is not possible since the number of function calls needs to match
                # the number of function responses; skipping one would mean removing the other,
                # which would break the chat log
chat_content = replace(chat_content, content=" ")
if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
tool_results.clear()
messages.append(_convert_content(chat_content))
# The SDK requires the first message to be a user message
        # This is not the case if the user used `start_conversation`
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
if messages and messages[0].role != "user":
messages.insert(
0,
Content(role="user", parts=[Part.from_text(text=" ")]),
)
if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
generateContentConfig = GenerateContentConfig(
temperature=self.entry.options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
),
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
max_output_tokens=self.entry.options.get(
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
),
safety_settings=[
SafetySetting(
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
threshold=self.entry.options.get(
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
threshold=self.entry.options.get(
CONF_HARASSMENT_BLOCK_THRESHOLD,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
threshold=self.entry.options.get(
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
threshold=self.entry.options.get(
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
],
tools=tools or None,
system_instruction=prompt if supports_system_instruction else None,
automatic_function_calling=AutomaticFunctionCallingConfig(
disable=True, maximum_remote_calls=None
),
)
if not supports_system_instruction:
messages = [
Content(role="user", parts=[Part.from_text(text=prompt)]),
Content(role="model", parts=[Part.from_text(text="Ok")]),
*messages,
]
chat = self._genai_client.aio.chats.create(
model=model_name, history=messages, config=generateContentConfig
)
chat_request: str | list[Part] = user_input.text
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
chat_response_generator = await chat.send_message_stream(
message=chat_request
)
except (
APIError,
ClientError,
ValueError,
) as err:
LOGGER.error("Error sending message: %s %s", type(err), err)
error = ERROR_GETTING_RESPONSE
raise HomeAssistantError(error) from err
chat_request = _create_google_tool_response_parts(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id,
_transform_stream(chat_response_generator),
)
if isinstance(content, conversation.ToolResultContent)
]
)
if not chat_log.unresponded_tool_results:
break
await self._async_handle_chat_log(chat_log)
response = intent.IntentResponse(language=user_input.language)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
@@ -535,7 +90,7 @@ class GoogleGenerativeAIConversationEntity(
"Last content in chat log is not an AssistantContent: %s. This could be due to the model not returning a valid response",
chat_log.content[-1],
)
raise HomeAssistantError(f"{ERROR_GETTING_RESPONSE}")
raise HomeAssistantError(ERROR_GETTING_RESPONSE)
response.async_set_speech(chat_log.content[-1].content or "")
return conversation.ConversationResult(
response=response,

View File

@@ -0,0 +1,475 @@
"""Conversation support for the Google Generative AI Conversation integration."""
from __future__ import annotations
import codecs
from collections.abc import AsyncGenerator, Callable
from dataclasses import replace
from typing import Any, cast
from google.genai.errors import APIError, ClientError
from google.genai.types import (
AutomaticFunctionCallingConfig,
Content,
FunctionDeclaration,
GenerateContentConfig,
GenerateContentResponse,
GoogleSearch,
HarmCategory,
Part,
SafetySetting,
Schema,
Tool,
)
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from .const import (
CONF_CHAT_MODEL,
CONF_DANGEROUS_BLOCK_THRESHOLD,
CONF_HARASSMENT_BLOCK_THRESHOLD,
CONF_HATE_BLOCK_THRESHOLD,
CONF_MAX_TOKENS,
CONF_SEXUAL_BLOCK_THRESHOLD,
CONF_TEMPERATURE,
CONF_TOP_K,
CONF_TOP_P,
CONF_USE_GOOGLE_SEARCH_TOOL,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_K,
RECOMMENDED_TOP_P,
)
# Max number of back-and-forth exchanges with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10
ERROR_GETTING_RESPONSE = (
"Sorry, I had a problem getting a response from Google Generative AI."
)
SUPPORTED_SCHEMA_KEYS = {
    # The Gemini API does not support all of the OpenAPI schema
    # Source of truth: https://ai.google.dev/api/caching#Schema
"type",
"format",
"description",
"nullable",
"enum",
"max_items",
"min_items",
"properties",
"required",
"items",
}
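# Note: the converted schema is expected to use OpenAPI-style camelCase keys
# (e.g. "maxItems"); _format_schema() below normalizes them to snake_case before
# checking them against this set.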
def _camel_to_snake(name: str) -> str:
"""Convert camel case to snake case."""
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
def _format_schema(schema: dict[str, Any]) -> Schema:
"""Format the schema to be compatible with Gemini API."""
if subschemas := schema.get("allOf"):
for subschema in subschemas: # Gemini API does not support allOf keys
if "type" in subschema: # Fallback to first subschema with 'type' field
return _format_schema(subschema)
return _format_schema(
subschemas[0]
) # Or, if not found, to any of the subschemas
result = {}
for key, val in schema.items():
key = _camel_to_snake(key)
if key not in SUPPORTED_SCHEMA_KEYS:
continue
if key == "type":
val = val.upper()
elif key == "format":
            # The Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
            # Unsupported formats are ignored
if schema.get("type") == "string" and val not in ("enum", "date-time"):
continue
if schema.get("type") == "number" and val not in ("float", "double"):
continue
if schema.get("type") == "integer" and val not in ("int32", "int64"):
continue
if schema.get("type") not in ("string", "number", "integer"):
continue
elif key == "items":
val = _format_schema(val)
elif key == "properties":
val = {k: _format_schema(v) for k, v in val.items()}
result[key] = val
if result.get("enum") and result.get("type") != "STRING":
# enum is only allowed for STRING type. This is safe as long as the schema
# contains vol.Coerce for the respective type, for example:
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
result["type"] = "STRING"
result["enum"] = [str(item) for item in result["enum"]]
if result.get("type") == "OBJECT" and not result.get("properties"):
# An object with undefined properties is not supported by Gemini API.
# Fallback to JSON string. This will probably fail for most tools that want it,
# but we don't have a better fallback strategy so far.
result["properties"] = {"json": {"type": "STRING"}}
result["required"] = []
return cast(Schema, result)
def _format_tool(
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> Tool:
"""Format tool specification."""
if tool.parameters.schema:
parameters = _format_schema(
convert(tool.parameters, custom_serializer=custom_serializer)
)
else:
parameters = None
return Tool(
function_declarations=[
FunctionDeclaration(
name=tool.name,
description=tool.description,
parameters=parameters,
)
]
)
def _escape_decode(value: Any) -> Any:
"""Recursively call codecs.escape_decode on all values."""
if isinstance(value, str):
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
if isinstance(value, list):
return [_escape_decode(item) for item in value]
if isinstance(value, dict):
return {k: _escape_decode(v) for k, v in value.items()}
return value
def _create_google_tool_response_parts(
parts: list[conversation.ToolResultContent],
) -> list[Part]:
"""Create Google tool response parts."""
return [
Part.from_function_response(
name=tool_result.tool_name, response=tool_result.tool_result
)
for tool_result in parts
]
def _create_google_tool_response_content(
content: list[conversation.ToolResultContent],
) -> Content:
"""Create a Google tool response content."""
return Content(
role="user",
parts=_create_google_tool_response_parts(content),
)
def _convert_content(
content: (
conversation.UserContent
| conversation.AssistantContent
| conversation.SystemContent
),
) -> Content:
"""Convert HA content to Google content."""
if content.role != "assistant" or not content.tool_calls:
role = "model" if content.role == "assistant" else content.role
return Content(
role=role,
parts=[
Part.from_text(text=content.content if content.content else ""),
],
)
# Handle the Assistant content with tool calls.
assert type(content) is conversation.AssistantContent
parts: list[Part] = []
if content.content:
parts.append(Part.from_text(text=content.content))
if content.tool_calls:
parts.extend(
[
Part.from_function_call(
name=tool_call.tool_name,
args=_escape_decode(tool_call.tool_args),
)
for tool_call in content.tool_calls
]
)
return Content(role="model", parts=parts)
async def _transform_stream(
result: AsyncGenerator[GenerateContentResponse],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
new_message = True
try:
async for response in result:
LOGGER.debug("Received response chunk: %s", response)
chunk: conversation.AssistantContentDeltaDict = {}
if new_message:
chunk["role"] = "assistant"
new_message = False
            # According to the API docs, this would mean no candidate is returned, so we can safely raise an error here.
if response.prompt_feedback or not response.candidates:
reason = (
response.prompt_feedback.block_reason_message
if response.prompt_feedback
else "unknown"
)
raise HomeAssistantError(
f"The message got blocked due to content violations, reason: {reason}"
)
candidate = response.candidates[0]
if (
candidate.finish_reason is not None
and candidate.finish_reason != "STOP"
):
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
LOGGER.error(
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
candidate.finish_reason,
)
raise HomeAssistantError(
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
)
response_parts = (
candidate.content.parts
if candidate.content is not None and candidate.content.parts is not None
else []
)
content = "".join([part.text for part in response_parts if part.text])
tool_calls = []
for part in response_parts:
if not part.function_call:
continue
tool_call = part.function_call
tool_name = tool_call.name if tool_call.name else ""
tool_args = _escape_decode(tool_call.args)
tool_calls.append(
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
)
if tool_calls:
chunk["tool_calls"] = tool_calls
chunk["content"] = content
yield chunk
except (
APIError,
ValueError,
) as err:
LOGGER.error("Error sending message: %s %s", type(err), err)
if isinstance(err, APIError):
message = err.message
else:
message = type(err).__name__
error = f"{ERROR_GETTING_RESPONSE}: {message}"
raise HomeAssistantError(error) from err
class GoogleGenerativeAILLMBaseEntity(Entity):
"""Google Generative AI base entity."""
_attr_has_entity_name = True
_attr_name = None
def __init__(self, entry: ConfigEntry) -> None:
"""Initialize the agent."""
self.entry = entry
self._genai_client = entry.runtime_data
self._attr_unique_id = entry.entry_id
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
name=entry.title,
manufacturer="Google",
model="Generative AI",
entry_type=dr.DeviceEntryType.SERVICE,
)
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
) -> None:
"""Generate an answer for the chat log."""
options = self.entry.options
tools: list[Tool | Callable[..., Any]] | None = None
if chat_log.llm_api:
tools = [
_format_tool(tool, chat_log.llm_api.custom_serializer)
for tool in chat_log.llm_api.tools
]
        # Using search grounding allows the model to retrieve information from the web.
        # However, it may interfere with how the model decides to use some tools or entities;
        # for example, a weather entity may be disregarded if the model chooses to Google the weather instead.
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemma" not in model_name
and "gemini-2.0-flash-preview-image-generation" not in model_name
)
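        # For models without system-instruction support, the prompt is injected
        # further below as a synthetic user/model exchange ahead of the history.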
prompt_content = cast(
conversation.SystemContent,
chat_log.content[0],
)
if prompt_content.content:
prompt = prompt_content.content
else:
raise HomeAssistantError("Invalid prompt content")
messages: list[Content] = []
# Google groups tool results, we do not. Group them before sending.
tool_results: list[conversation.ToolResultContent] = []
for chat_content in chat_log.content[1:-1]:
if chat_content.role == "tool_result":
tool_results.append(chat_content)
continue
if (
not isinstance(chat_content, conversation.ToolResultContent)
and chat_content.content == ""
):
                # Skipping is not possible since the number of function calls needs to match
                # the number of function responses; skipping one would mean removing the other,
                # which would break the chat log
chat_content = replace(chat_content, content=" ")
if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
tool_results.clear()
messages.append(_convert_content(chat_content))
# The SDK requires the first message to be a user message
        # This is not the case if the user used `start_conversation`
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
if messages and messages[0].role != "user":
messages.insert(
0,
Content(role="user", parts=[Part.from_text(text=" ")]),
)
if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
generateContentConfig = GenerateContentConfig(
temperature=self.entry.options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
),
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
max_output_tokens=self.entry.options.get(
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
),
safety_settings=[
SafetySetting(
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
threshold=self.entry.options.get(
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
threshold=self.entry.options.get(
CONF_HARASSMENT_BLOCK_THRESHOLD,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
threshold=self.entry.options.get(
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
SafetySetting(
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
threshold=self.entry.options.get(
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
),
),
],
tools=tools or None,
system_instruction=prompt if supports_system_instruction else None,
automatic_function_calling=AutomaticFunctionCallingConfig(
disable=True, maximum_remote_calls=None
),
)
if not supports_system_instruction:
messages = [
Content(role="user", parts=[Part.from_text(text=prompt)]),
Content(role="model", parts=[Part.from_text(text="Ok")]),
*messages,
]
chat = self._genai_client.aio.chats.create(
model=model_name, history=messages, config=generateContentConfig
)
user_message = chat_log.content[-1]
assert isinstance(user_message, conversation.UserContent)
chat_request: str | list[Part] = user_message.content
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
chat_response_generator = await chat.send_message_stream(
message=chat_request
)
except (
APIError,
ClientError,
ValueError,
) as err:
LOGGER.error("Error sending message: %s %s", type(err), err)
error = ERROR_GETTING_RESPONSE
raise HomeAssistantError(error) from err
chat_request = _create_google_tool_response_parts(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_response_generator),
)
if isinstance(content, conversation.ToolResultContent)
]
)
if not chat_log.unresponded_tool_results:
break

View File

@@ -0,0 +1,216 @@
"""Text to speech support for Google Generative AI."""
from __future__ import annotations
from contextlib import suppress
import io
import logging
from typing import Any
import wave
from google.genai import types
from homeassistant.components.tts import (
ATTR_VOICE,
TextToSpeechEntity,
TtsAudioType,
Voice,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ATTR_MODEL, DOMAIN, RECOMMENDED_TTS_MODEL
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up TTS entity."""
tts_entity = GoogleGenerativeAITextToSpeechEntity(config_entry)
async_add_entities([tts_entity])
class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
"""Google Generative AI text-to-speech entity."""
_attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
# See https://ai.google.dev/gemini-api/docs/speech-generation#languages
_attr_supported_languages = [
"ar-EG",
"bn-BD",
"de-DE",
"en-IN",
"en-US",
"es-US",
"fr-FR",
"hi-IN",
"id-ID",
"it-IT",
"ja-JP",
"ko-KR",
"mr-IN",
"nl-NL",
"pl-PL",
"pt-BR",
"ro-RO",
"ru-RU",
"ta-IN",
"te-IN",
"th-TH",
"tr-TR",
"uk-UA",
"vi-VN",
]
_attr_default_language = "en-US"
# See https://ai.google.dev/gemini-api/docs/speech-generation#voices
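    # The voice_id used as the TTS voice option is derived from the display name,
    # e.g. "Zephyr (Bright)" -> "zephyr".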
_supported_voices = [
Voice(voice.split(" ", 1)[0].lower(), voice)
for voice in (
"Zephyr (Bright)",
"Puck (Upbeat)",
"Charon (Informative)",
"Kore (Firm)",
"Fenrir (Excitable)",
"Leda (Youthful)",
"Orus (Firm)",
"Aoede (Breezy)",
"Callirrhoe (Easy-going)",
"Autonoe (Bright)",
"Enceladus (Breathy)",
"Iapetus (Clear)",
"Umbriel (Easy-going)",
"Algieba (Smooth)",
"Despina (Smooth)",
"Erinome (Clear)",
"Algenib (Gravelly)",
"Rasalgethi (Informative)",
"Laomedeia (Upbeat)",
"Achernar (Soft)",
"Alnilam (Firm)",
"Schedar (Even)",
"Gacrux (Mature)",
"Pulcherrima (Forward)",
"Achird (Friendly)",
"Zubenelgenubi (Casual)",
"Vindemiatrix (Gentle)",
"Sadachbia (Lively)",
"Sadaltager (Knowledgeable)",
"Sulafat (Warm)",
)
]
def __init__(self, entry: ConfigEntry) -> None:
"""Initialize Google Generative AI Conversation speech entity."""
self.entry = entry
self._attr_name = "Google Generative AI TTS"
self._attr_unique_id = f"{entry.entry_id}_tts"
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
name=entry.title,
manufacturer="Google",
model="Generative AI",
entry_type=dr.DeviceEntryType.SERVICE,
)
self._genai_client = entry.runtime_data
self._default_voice_id = self._supported_voices[0].voice_id
@callback
def async_get_supported_voices(self, language: str) -> list[Voice] | None:
"""Return a list of supported voices for a language."""
return self._supported_voices
async def async_get_tts_audio(
self, message: str, language: str, options: dict[str, Any]
) -> TtsAudioType:
"""Load tts audio file from the engine."""
try:
response = self._genai_client.models.generate_content(
model=options.get(ATTR_MODEL, RECOMMENDED_TTS_MODEL),
contents=message,
config=types.GenerateContentConfig(
response_modalities=["AUDIO"],
speech_config=types.SpeechConfig(
voice_config=types.VoiceConfig(
prebuilt_voice_config=types.PrebuiltVoiceConfig(
voice_name=options.get(
ATTR_VOICE, self._default_voice_id
)
)
)
),
),
)
data = response.candidates[0].content.parts[0].inline_data.data
mime_type = response.candidates[0].content.parts[0].inline_data.mime_type
except Exception as exc:
_LOGGER.warning(
"Error during processing of TTS request %s", exc, exc_info=True
)
raise HomeAssistantError(exc) from exc
return "wav", self._convert_to_wav(data, mime_type)
def _convert_to_wav(self, audio_data: bytes, mime_type: str) -> bytes:
"""Generate a WAV file header for the given audio data and parameters.
Args:
audio_data: The raw audio data as a bytes object.
mime_type: Mime type of the audio data.
Returns:
            A bytes object containing the complete WAV file (header plus audio frames).
"""
parameters = self._parse_audio_mime_type(mime_type)
wav_buffer = io.BytesIO()
with wave.open(wav_buffer, "wb") as wf:
wf.setnchannels(1)
wf.setsampwidth(parameters["bits_per_sample"] // 8)
wf.setframerate(parameters["rate"])
wf.writeframes(audio_data)
return wav_buffer.getvalue()
def _parse_audio_mime_type(self, mime_type: str) -> dict[str, int]:
"""Parse bits per sample and rate from an audio MIME type string.
Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".
Args:
mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").
Returns:
            A dictionary with "bits_per_sample" and "rate" keys. Values default to
            16 and 24000 respectively when they cannot be parsed from the MIME type.
"""
if not mime_type.startswith("audio/L"):
_LOGGER.warning("Received unexpected MIME type %s", mime_type)
raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")
bits_per_sample = 16
rate = 24000
# Extract rate from parameters
parts = mime_type.split(";")
        for param in parts:  # includes the main "audio/L..." part, handled below
param = param.strip()
if param.lower().startswith("rate="):
# Handle cases like "rate=" with no value or non-integer value and keep rate as default
with suppress(ValueError, IndexError):
rate_str = param.split("=", 1)[1]
rate = int(rate_str)
elif param.startswith("audio/L"):
# Keep bits_per_sample as default if conversion fails
with suppress(ValueError, IndexError):
bits_per_sample = int(param.split("L", 1)[1])
return {"bits_per_sample": bits_per_sample, "rate": rate}

View File

@@ -16,6 +16,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -77,85 +78,85 @@ def _read_file_contents(
return results
async def _async_handle_upload(call: ServiceCall) -> ServiceResponse:
"""Generate content from text and optionally images."""
config_entry: GooglePhotosConfigEntry | None = (
call.hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
)
if not config_entry:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="integration_not_found",
translation_placeholders={"target": DOMAIN},
)
scopes = config_entry.data["token"]["scope"].split(" ")
if UPLOAD_SCOPE not in scopes:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="missing_upload_permission",
translation_placeholders={"target": DOMAIN},
)
coordinator = config_entry.runtime_data
client_api = coordinator.client
upload_tasks = []
file_results = await call.hass.async_add_executor_job(
_read_file_contents, call.hass, call.data[CONF_FILENAME]
)
album = call.data[CONF_ALBUM]
try:
album_id = await coordinator.get_or_create_album(album)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="create_album_error",
translation_placeholders={"message": str(err)},
) from err
for mime_type, content in file_results:
upload_tasks.append(client_api.upload_content(content, mime_type))
try:
upload_results = await asyncio.gather(*upload_tasks)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="upload_error",
translation_placeholders={"message": str(err)},
) from err
try:
upload_result = await client_api.create_media_items(
[
NewMediaItem(SimpleMediaItem(upload_token=upload_result.upload_token))
for upload_result in upload_results
],
album_id=album_id,
)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"message": str(err)},
) from err
if call.return_response:
return {
"media_items": [
{"media_item_id": item_result.media_item.id}
for item_result in upload_result.new_media_item_results
if item_result.media_item and item_result.media_item.id
],
"album_id": album_id,
}
return None
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register Google Photos services."""
async def async_handle_upload(call: ServiceCall) -> ServiceResponse:
"""Generate content from text and optionally images."""
config_entry: GooglePhotosConfigEntry | None = (
hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
)
if not config_entry:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="integration_not_found",
translation_placeholders={"target": DOMAIN},
)
scopes = config_entry.data["token"]["scope"].split(" ")
if UPLOAD_SCOPE not in scopes:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="missing_upload_permission",
translation_placeholders={"target": DOMAIN},
)
coordinator = config_entry.runtime_data
client_api = coordinator.client
upload_tasks = []
file_results = await hass.async_add_executor_job(
_read_file_contents, hass, call.data[CONF_FILENAME]
)
album = call.data[CONF_ALBUM]
try:
album_id = await coordinator.get_or_create_album(album)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="create_album_error",
translation_placeholders={"message": str(err)},
) from err
for mime_type, content in file_results:
upload_tasks.append(client_api.upload_content(content, mime_type))
try:
upload_results = await asyncio.gather(*upload_tasks)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="upload_error",
translation_placeholders={"message": str(err)},
) from err
try:
upload_result = await client_api.create_media_items(
[
NewMediaItem(
SimpleMediaItem(upload_token=upload_result.upload_token)
)
for upload_result in upload_results
],
album_id=album_id,
)
except GooglePhotosApiError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"message": str(err)},
) from err
if call.return_response:
return {
"media_items": [
{"media_item_id": item_result.media_item.id}
for item_result in upload_result.new_media_item_results
if item_result.media_item and item_result.media_item.id
],
"album_id": album_id,
}
return None
hass.services.async_register(
DOMAIN,
UPLOAD_SERVICE,
async_handle_upload,
_async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
)

View File

@@ -13,7 +13,7 @@ from gspread.utils import ValueInputOption
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import ConfigEntrySelector
@@ -76,6 +76,7 @@ async def _async_append_to_sheet(call: ServiceCall) -> None:
await call.hass.async_add_executor_job(_append_to_sheet, call, entry)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Add the services for Google Sheets."""

View File

@@ -35,6 +35,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -249,6 +250,7 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
return entry
@callback
def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
"""Set up services for Habitica integration."""

View File

@@ -1,9 +1,13 @@
"""The homee event platform."""
from pyHomee.const import AttributeType
from pyHomee.const import AttributeType, NodeProfile
from pyHomee.model import HomeeAttribute
from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.components.event import (
EventDeviceClass,
EventEntity,
EventEntityDescription,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -13,6 +17,38 @@ from .entity import HomeeEntity
PARALLEL_UPDATES = 0
REMOTE_PROFILES = [
NodeProfile.REMOTE,
NodeProfile.TWO_BUTTON_REMOTE,
NodeProfile.THREE_BUTTON_REMOTE,
NodeProfile.FOUR_BUTTON_REMOTE,
]
EVENT_DESCRIPTIONS: dict[AttributeType, EventEntityDescription] = {
AttributeType.BUTTON_STATE: EventEntityDescription(
key="button_state",
device_class=EventDeviceClass.BUTTON,
event_types=["upper", "lower", "released"],
),
AttributeType.UP_DOWN_REMOTE: EventEntityDescription(
key="up_down_remote",
device_class=EventDeviceClass.BUTTON,
event_types=[
"released",
"up",
"down",
"stop",
"up_long",
"down_long",
"stop_long",
"c_button",
"b_button",
"a_button",
],
),
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HomeeConfigEntry,
@@ -21,30 +57,31 @@ async def async_setup_entry(
"""Add event entities for homee."""
async_add_entities(
HomeeEvent(attribute, config_entry)
HomeeEvent(attribute, config_entry, EVENT_DESCRIPTIONS[attribute.type])
for node in config_entry.runtime_data.nodes
for attribute in node.attributes
if attribute.type == AttributeType.UP_DOWN_REMOTE
if attribute.type in EVENT_DESCRIPTIONS
and node.profile in REMOTE_PROFILES
and not attribute.editable
)
class HomeeEvent(HomeeEntity, EventEntity):
"""Representation of a homee event."""
_attr_translation_key = "up_down_remote"
_attr_event_types = [
"released",
"up",
"down",
"stop",
"up_long",
"down_long",
"stop_long",
"c_button",
"b_button",
"a_button",
]
_attr_device_class = EventDeviceClass.BUTTON
def __init__(
self,
attribute: HomeeAttribute,
entry: HomeeConfigEntry,
description: EventEntityDescription,
) -> None:
"""Initialize the homee event entity."""
super().__init__(attribute, entry)
self.entity_description = description
self._attr_translation_key = description.key
if attribute.instance > 0:
self._attr_translation_key = f"{self._attr_translation_key}_instance"
self._attr_translation_placeholders = {"instance": str(attribute.instance)}
async def async_added_to_hass(self) -> None:
"""Add the homee event entity to home assistant."""
@@ -56,6 +93,5 @@ class HomeeEvent(HomeeEntity, EventEntity):
@callback
def _event_triggered(self, event: HomeeAttribute) -> None:
"""Handle a homee event."""
if event.type == AttributeType.UP_DOWN_REMOTE:
self._trigger_event(self.event_types[int(event.current_value)])
self.schedule_update_ha_state()
self._trigger_event(self.event_types[int(event.current_value)])
self.schedule_update_ha_state()

View File

@@ -160,12 +160,36 @@
}
},
"event": {
"button_state": {
"name": "Switch",
"state_attributes": {
"event_type": {
"state": {
"upper": "Upper button",
"lower": "Lower button",
"released": "Released"
}
}
}
},
"button_state_instance": {
"name": "Switch {instance}",
"state_attributes": {
"event_type": {
"state": {
"upper": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::upper%]",
"lower": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::lower%]",
"released": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::released%]"
}
}
}
},
"up_down_remote": {
"name": "Up/down remote",
"state_attributes": {
"event_type": {
"state": {
"release": "Released",
"release": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::released%]",
"up": "Up",
"down": "Down",
"stop": "Stop",

View File

@@ -128,6 +128,7 @@ class HomematicipHAP:
self.config_entry.data.get(HMIPC_AUTHTOKEN),
self.config_entry.data.get(HMIPC_NAME),
)
except HmipcConnectionError as err:
raise ConfigEntryNotReady from err
except Exception as err: # noqa: BLE001
@@ -210,41 +211,13 @@ class HomematicipHAP:
for device in self.home.devices:
device.fire_update_event()
async def async_connect(self) -> None:
"""Start WebSocket connection."""
tries = 0
while True:
retry_delay = 2 ** min(tries, 8)
async def async_connect(self, home: AsyncHome) -> None:
"""Connect to HomematicIP Cloud Websocket."""
await home.enable_events()
try:
await self.home.get_current_state_async()
hmip_events = self.home.enable_events()
self.home.set_on_connected_handler(self.ws_connected_handler)
self.home.set_on_disconnected_handler(self.ws_disconnected_handler)
tries = 0
await hmip_events
except HmipConnectionError:
_LOGGER.error(
(
"Error connecting to HomematicIP with HAP %s. "
"Retrying in %d seconds"
),
self.config_entry.unique_id,
retry_delay,
)
if self._ws_close_requested:
break
self._ws_close_requested = False
tries += 1
try:
self._retry_task = self.hass.async_create_task(
asyncio.sleep(retry_delay)
)
await self._retry_task
except asyncio.CancelledError:
break
home.set_on_connected_handler(self.ws_connected_handler)
home.set_on_disconnected_handler(self.ws_disconnected_handler)
home.set_on_reconnect_handler(self.ws_reconnected_handler)
async def async_reset(self) -> bool:
"""Close the websocket connection."""
@@ -272,14 +245,22 @@ class HomematicipHAP:
async def ws_connected_handler(self) -> None:
"""Handle websocket connected."""
_LOGGER.debug("WebSocket connection to HomematicIP established")
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
if self._ws_connection_closed.is_set():
await self.get_state()
self._ws_connection_closed.clear()
async def ws_disconnected_handler(self) -> None:
"""Handle websocket disconnection."""
_LOGGER.warning("WebSocket connection to HomematicIP closed")
_LOGGER.warning("Websocket connection to HomematicIP Cloud closed")
self._ws_connection_closed.set()
async def ws_reconnected_handler(self, reason: str) -> None:
"""Handle websocket reconnection."""
_LOGGER.info(
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
reason,
)
self._ws_connection_closed.set()
async def get_hap(
@@ -306,6 +287,6 @@ class HomematicipHAP:
home.on_update(self.async_update)
home.on_create(self.async_create_entity)
hass.loop.create_task(self.async_connect())
await self.async_connect(home)
return home

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"requirements": ["homematicip==2.0.4"]
"requirements": ["homematicip==2.0.5"]
}

View File

@@ -4,13 +4,14 @@ from __future__ import annotations
from typing import Any
from homematicip.base.enums import DeviceType
from homematicip.base.enums import DeviceType, FunctionalChannelType
from homematicip.device import (
BrandSwitch2,
DinRailSwitch,
DinRailSwitch4,
FullFlushInputSwitch,
HeatingSwitch2,
MotionDetectorSwitchOutdoor,
MultiIOBox,
OpenCollector8Module,
PlugableSwitch,
@@ -47,18 +48,34 @@ async def async_setup_entry(
and getattr(device, "deviceType", None) != DeviceType.BRAND_SWITCH_MEASURING
):
entities.append(HomematicipSwitchMeasuring(hap, device))
elif isinstance(device, WiredSwitch8):
elif isinstance(
device,
(
WiredSwitch8,
OpenCollector8Module,
BrandSwitch2,
PrintedCircuitBoardSwitch2,
HeatingSwitch2,
MultiIOBox,
MotionDetectorSwitchOutdoor,
DinRailSwitch,
DinRailSwitch4,
),
):
channel_indices = [
ch.index
for ch in device.functionalChannels
if ch.functionalChannelType
in (
FunctionalChannelType.SWITCH_CHANNEL,
FunctionalChannelType.MULTI_MODE_INPUT_SWITCH_CHANNEL,
)
]
entities.extend(
HomematicipMultiSwitch(hap, device, channel=channel)
for channel in range(1, 9)
)
elif isinstance(device, DinRailSwitch):
entities.append(HomematicipMultiSwitch(hap, device, channel=1))
elif isinstance(device, DinRailSwitch4):
entities.extend(
HomematicipMultiSwitch(hap, device, channel=channel)
for channel in range(1, 5)
for channel in channel_indices
)
elif isinstance(
device,
(
@@ -68,24 +85,6 @@ async def async_setup_entry(
),
):
entities.append(HomematicipSwitch(hap, device))
elif isinstance(device, OpenCollector8Module):
entities.extend(
HomematicipMultiSwitch(hap, device, channel=channel)
for channel in range(1, 9)
)
elif isinstance(
device,
(
BrandSwitch2,
PrintedCircuitBoardSwitch2,
HeatingSwitch2,
MultiIOBox,
),
):
entities.extend(
HomematicipMultiSwitch(hap, device, channel=channel)
for channel in range(1, 3)
)
async_add_entities(entities)
@@ -108,15 +107,15 @@ class HomematicipMultiSwitch(HomematicipGenericEntity, SwitchEntity):
@property
def is_on(self) -> bool:
"""Return true if switch is on."""
return self._device.functionalChannels[self._channel].on
return self.functional_channel.on
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self._device.turn_on_async(self._channel)
await self.functional_channel.async_turn_on()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self._device.turn_off_async(self._channel)
await self.functional_channel.async_turn_off()
class HomematicipSwitch(HomematicipMultiSwitch, SwitchEntity):

View File

@@ -12,6 +12,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==8.3.3"],
"requirements": ["python-homewizard-energy==9.1.1"],
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}

View File

@@ -8,7 +8,7 @@ import logging
from aiohue import HueBridgeV1, HueBridgeV2
import voluptuous as vol
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.service import verify_domain_control
@@ -25,6 +25,7 @@ from .const import (
LOGGER = logging.getLogger(__name__)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for Hue integration."""

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
import voluptuous as vol
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.util import slugify
@@ -115,6 +115,7 @@ def _get_account(hass: HomeAssistant, account_identifier: str) -> IcloudAccount:
return icloud_account
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register iCloud services."""

View File

@@ -15,6 +15,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
@@ -39,6 +40,7 @@ OMER_SCHEMA = vol.Schema(
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the Jewish Calendar services."""

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/lcn",
"iot_class": "local_push",
"loggers": ["pypck"],
"requirements": ["pypck==0.8.7", "lcn-frontend==0.2.5"]
"requirements": ["pypck==0.8.8", "lcn-frontend==0.2.5"]
}

View File

@@ -16,6 +16,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
@@ -438,6 +439,7 @@ SERVICES = (
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for LCN."""
for service_name, service in SERVICES:

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
import logging
from letpot.deviceclient import LetPotDeviceClient
@@ -42,6 +43,7 @@ class LetPotDeviceCoordinator(DataUpdateCoordinator[LetPotDeviceStatus]):
_LOGGER,
config_entry=config_entry,
name=f"LetPot {device.serial_number}",
update_interval=timedelta(minutes=10),
)
self._info = info
self.device = device

View File

@@ -5,9 +5,9 @@ rules:
comment: |
This integration does not provide additional actions.
appropriate-polling:
status: exempt
status: done
comment: |
This integration only receives push-based updates.
Primarily uses push, but polls with a long interval for availability and missed updates.
brands: done
common-modules: done
config-flow-test-coverage: done
@@ -39,7 +39,7 @@ rules:
comment: |
The integration does not have configuration options.
docs-installation-parameters: done
entity-unavailable: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: todo
parallel-updates: done

View File

@@ -13,5 +13,5 @@
"iot_class": "cloud_push",
"loggers": ["pylitterbot"],
"quality_scale": "bronze",
"requirements": ["pylitterbot==2024.0.0"]
"requirements": ["pylitterbot==2024.2.0"]
}

View File

@@ -36,11 +36,6 @@ _LOGGER = logging.getLogger(__name__)
PRODID = "-//homeassistant.io//local_calendar 1.0//EN"
# The calendar on disk is only changed when this entity is updated, so there
# is no need to poll for changes. The calendar entity base class will handle
# refreshing the entity state based on the start or end time of the event.
SCAN_INTERVAL = timedelta(days=1)
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -88,7 +88,6 @@ class ModelContextProtocolSSEView(HomeAssistantView):
context = llm.LLMContext(
platform=DOMAIN,
context=self.context(request),
user_prompt=None,
language="*",
assistant=conversation.DOMAIN,
device_id=None,

View File

@@ -62,6 +62,7 @@ TILT_DEVICE_MAP = {
BlindType.VerticalBlind: CoverDeviceClass.BLIND,
BlindType.VerticalBlindLeft: CoverDeviceClass.BLIND,
BlindType.VerticalBlindRight: CoverDeviceClass.BLIND,
BlindType.RollerTiltMotor: CoverDeviceClass.BLIND,
}
TILT_ONLY_DEVICE_MAP = {

View File

@@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/motion_blinds",
"iot_class": "local_push",
"loggers": ["motionblinds"],
"requirements": ["motionblinds==0.6.27"]
"requirements": ["motionblinds==0.6.28"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nextbus",
"iot_class": "cloud_polling",
"loggers": ["py_nextbus"],
"requirements": ["py-nextbusnext==2.2.0"]
"requirements": ["py-nextbusnext==2.3.0"]
}

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
@@ -11,15 +10,14 @@ from .const import (
CONF_AREA_FILTER,
CONF_FILTER_CORONA,
CONF_HEADLINE_FILTER,
DOMAIN,
NO_MATCH_REGEX,
)
from .coordinator import NINADataUpdateCoordinator
from .coordinator import NinaConfigEntry, NINADataUpdateCoordinator
PLATFORMS: list[str] = [Platform.BINARY_SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
"""Set up platform from a ConfigEntry."""
if CONF_HEADLINE_FILTER not in entry.data:
filter_regex = NO_MATCH_REGEX
@@ -41,18 +39,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def _async_update_listener(hass: HomeAssistant, entry: NinaConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -30,17 +30,17 @@ from .const import (
CONF_REGIONS,
DOMAIN,
)
from .coordinator import NINADataUpdateCoordinator
from .coordinator import NinaConfigEntry, NINADataUpdateCoordinator
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: NinaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up entries."""
coordinator: NINADataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
coordinator = config_entry.runtime_data
regions: dict[str, str] = config_entry.data[CONF_REGIONS]
message_slots: int = config_entry.data[CONF_MESSAGE_SLOTS]

View File

@@ -23,6 +23,8 @@ from .const import (
SCAN_INTERVAL,
)
type NinaConfigEntry = ConfigEntry[NINADataUpdateCoordinator]
@dataclass
class NinaWarningData:

View File

@@ -22,6 +22,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -66,6 +67,7 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> NordPoolConfigEntry:
return entry
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for Nord Pool integration."""

View File

@@ -2,7 +2,7 @@
import voluptuous as vol
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -48,6 +48,7 @@ def set_speed(call: ServiceCall) -> None:
_get_coordinator(call).nzbget.rate(call.data[ATTR_SPEED])
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register integration-level services."""

View File

@@ -11,6 +11,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import selector
@@ -70,6 +71,7 @@ def __get_client(call: ServiceCall) -> OhmeApiClient:
return entry.runtime_data.charge_session_coordinator.client
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services."""

View File

@@ -218,19 +218,41 @@ class OllamaConversationEntity(
"""Call the API."""
settings = {**self.entry.data, **self.entry.options}
client = self.hass.data[DOMAIN][self.entry.entry_id]
model = settings[CONF_MODEL]
try:
await chat_log.async_update_llm_data(
DOMAIN,
user_input,
await chat_log.async_provide_llm_data(
user_input.as_llm_context(DOMAIN),
settings.get(CONF_LLM_HASS_API),
settings.get(CONF_PROMPT),
user_input.extra_system_prompt,
)
except conversation.ConverseError as err:
return err.as_conversation_result()
await self._async_handle_chat_log(chat_log)
# Create intent response
intent_response = intent.IntentResponse(language=user_input.language)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise TypeError(
f"Unexpected last message type: {type(chat_log.content[-1])}"
)
intent_response.async_set_speech(chat_log.content[-1].content or "")
return conversation.ConversationResult(
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
) -> None:
"""Generate an answer for the chat log."""
settings = {**self.entry.data, **self.entry.options}
client = self.hass.data[DOMAIN][self.entry.entry_id]
model = settings[CONF_MODEL]
tools: list[dict[str, Any]] | None = None
if chat_log.llm_api:
tools = [
@@ -269,7 +291,7 @@ class OllamaConversationEntity(
[
_convert_content(content)
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(response_generator)
self.entity_id, _transform_stream(response_generator)
)
]
)
@@ -277,19 +299,6 @@ class OllamaConversationEntity(
if not chat_log.unresponded_tool_results:
break
# Create intent response
intent_response = intent.IntentResponse(language=user_input.language)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise TypeError(
f"Unexpected last message type: {type(chat_log.content[-1])}"
)
intent_response.async_set_speech(chat_log.content[-1].content or "")
return conversation.ConversationResult(
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
def _trim_history(self, message_history: MessageHistory, max_messages: int) -> None:
"""Trims excess messages from a single history.

View File

@@ -16,6 +16,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -70,6 +71,7 @@ def _read_file_contents(
return results
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register OneDrive services."""

View File

@@ -8,7 +8,7 @@ import voluptuous as vol
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.util.hass_dict import HassKey
@@ -40,6 +40,7 @@ ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema(
SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output"
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register Onkyo services."""

View File

@@ -279,11 +279,11 @@ class OpenAIConversationEntity(
options = self.entry.options
try:
await chat_log.async_update_llm_data(
DOMAIN,
user_input,
await chat_log.async_provide_llm_data(
user_input.as_llm_context(DOMAIN),
options.get(CONF_LLM_HASS_API),
options.get(CONF_PROMPT),
user_input.extra_system_prompt,
)
except conversation.ConverseError as err:
return err.as_conversation_result()

View File

@@ -15,7 +15,7 @@ from homeassistant.const import (
ATTR_TEMPERATURE,
ATTR_TIME,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -61,6 +61,7 @@ def _get_gateway(call: ServiceCall) -> OpenThermGatewayHub:
return gw_hub
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for the component."""
service_reset_schema = vol.Schema({vol.Required(ATTR_GW_ID): vol.All(cv.string)})

View File

@@ -10,6 +10,7 @@ from opower import (
CannotConnect,
InvalidAuth,
Opower,
create_cookie_jar,
get_supported_utility_names,
select_utility,
)
@@ -39,7 +40,7 @@ async def _validate_login(
) -> dict[str, str]:
"""Validate login data and return any errors."""
api = Opower(
async_create_clientsession(hass),
async_create_clientsession(hass, cookie_jar=create_cookie_jar()),
login_data[CONF_UTILITY],
login_data[CONF_USERNAME],
login_data[CONF_PASSWORD],

View File

@@ -12,6 +12,7 @@ from opower import (
MeterType,
Opower,
ReadResolution,
create_cookie_jar,
)
from opower.exceptions import ApiException, CannotConnect, InvalidAuth
@@ -30,7 +31,8 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, UnitOfEnergy, UnitOfVolume
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import aiohttp_client, issue_registry as ir
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
@@ -62,7 +64,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
update_interval=timedelta(hours=12),
)
self.api = Opower(
aiohttp_client.async_get_clientsession(hass),
async_create_clientsession(hass, cookie_jar=create_cookie_jar()),
config_entry.data[CONF_UTILITY],
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"loggers": ["opower"],
"requirements": ["opower==0.12.3"]
"requirements": ["opower==0.12.4"]
}

View File

@@ -7,7 +7,7 @@ from typing import cast
from python_picnic_api2 import PicnicAPI
import voluptuous as vol
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from .const import (
@@ -26,6 +26,7 @@ class PicnicServiceException(Exception):
"""Exception for Picnic services."""
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for the Picnic integration, if not registered yet."""

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import voluptuous as vol
from homeassistant.const import ATTR_COMMAND, ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from .const import COMMANDS, DOMAIN, PS4_DATA
@@ -29,6 +29,7 @@ async def async_service_command(call: ServiceCall) -> None:
await device.async_send_command(command)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Handle for services."""

View File

@@ -4,6 +4,7 @@ from datetime import datetime
import logging
from ical.event import Event
from ical.timeline import Timeline
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.core import HomeAssistant
@@ -48,12 +49,18 @@ class RemoteCalendarEntity(
super().__init__(coordinator)
self._attr_name = entry.data[CONF_CALENDAR_NAME]
self._attr_unique_id = entry.entry_id
self._event: CalendarEvent | None = None
self._timeline: Timeline | None = None
@property
def event(self) -> CalendarEvent | None:
"""Return the next upcoming event."""
return self._event
if self._timeline is None:
return None
now = dt_util.now()
events = self._timeline.active_after(now)
if event := next(events, None):
return _get_calendar_event(event)
return None
async def async_get_events(
self, hass: HomeAssistant, start_date: datetime, end_date: datetime
@@ -79,15 +86,12 @@ class RemoteCalendarEntity(
"""
await super().async_update()
def next_timeline_event() -> CalendarEvent | None:
def _get_timeline() -> Timeline | None:
"""Return the next active event."""
now = dt_util.now()
events = self.coordinator.data.timeline_tz(now.tzinfo).active_after(now)
if event := next(events, None):
return _get_calendar_event(event)
return None
return self.coordinator.data.timeline_tz(now.tzinfo)
self._event = await self.hass.async_add_executor_job(next_timeline_event)
self._timeline = await self.hass.async_add_executor_job(_get_timeline)
def _get_calendar_event(event: Event) -> CalendarEvent:

View File

@@ -1,7 +1,7 @@
{
"domain": "remote_calendar",
"name": "Remote Calendar",
"codeowners": ["@Thomas55555"],
"codeowners": ["@Thomas55555", "@allenporter"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/remote_calendar",
"integration_type": "service",

View File

@@ -172,6 +172,9 @@
"floodlight_brightness": {
"default": "mdi:spotlight-beam"
},
"ir_brightness": {
"default": "mdi:led-off"
},
"volume": {
"default": "mdi:volume-high",
"state": {
@@ -217,6 +220,9 @@
"ai_animal_sensitivity": {
"default": "mdi:paw"
},
"cry_sensitivity": {
"default": "mdi:emoticon-cry-outline"
},
"crossline_sensitivity": {
"default": "mdi:fence"
},

View File

@@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.14.0"]
"requirements": ["reolink-aio==0.14.1"]
}

View File

@@ -122,6 +122,20 @@ NUMBER_ENTITIES = (
value=lambda api, ch: api.whiteled_brightness(ch),
method=lambda api, ch, value: api.set_whiteled(ch, brightness=int(value)),
),
ReolinkNumberEntityDescription(
key="ir_brightness",
cmd_key="208",
translation_key="ir_brightness",
entity_category=EntityCategory.CONFIG,
native_step=1,
native_min_value=0,
native_max_value=100,
supported=lambda api, ch: api.supported(ch, "ir_brightness"),
value=lambda api, ch: api.baichuan.ir_brightness(ch),
method=lambda api, ch, value: (
api.baichuan.set_status_led(ch, ir_brightness=int(value))
),
),
ReolinkNumberEntityDescription(
key="volume",
cmd_key="GetAudioCfg",
@@ -258,6 +272,18 @@ NUMBER_ENTITIES = (
value=lambda api, ch: api.ai_sensitivity(ch, "dog_cat"),
method=lambda api, ch, value: api.set_ai_sensitivity(ch, int(value), "dog_cat"),
),
ReolinkNumberEntityDescription(
key="cry_sensitivity",
cmd_key="299",
translation_key="cry_sensitivity",
entity_category=EntityCategory.CONFIG,
native_step=1,
native_min_value=1,
native_max_value=5,
supported=lambda api, ch: api.supported(ch, "ai_cry"),
value=lambda api, ch: api.baichuan.cry_sensitivity(ch),
method=lambda api, ch, value: api.baichuan.set_cry_detection(ch, int(value)),
),
ReolinkNumberEntityDescription(
key="ai_face_delay",
cmd_key="GetAiAlarm",

View File

@@ -19,51 +19,54 @@ from .util import get_device_uid_and_ch, raise_translated_error
ATTR_RINGTONE = "ringtone"
@raise_translated_error
async def _async_play_chime(service_call: ServiceCall) -> None:
"""Play a ringtone."""
service_data = service_call.data
device_registry = dr.async_get(service_call.hass)
for device_id in service_data[ATTR_DEVICE_ID]:
config_entry = None
device = device_registry.async_get(device_id)
if device is not None:
for entry_id in device.config_entries:
config_entry = service_call.hass.config_entries.async_get_entry(
entry_id
)
if config_entry is not None and config_entry.domain == DOMAIN:
break
if (
config_entry is None
or device is None
or config_entry.state != ConfigEntryState.LOADED
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_entry_ex",
translation_placeholders={"service_name": "play_chime"},
)
host: ReolinkHost = config_entry.runtime_data.host
(device_uid, chime_id, is_chime) = get_device_uid_and_ch(device, host)
chime: Chime | None = host.api.chime(chime_id)
if not is_chime or chime is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_not_chime",
translation_placeholders={"device_name": str(device.name)},
)
ringtone = service_data[ATTR_RINGTONE]
await chime.play(ChimeToneEnum[ringtone].value)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up Reolink services."""
@raise_translated_error
async def async_play_chime(service_call: ServiceCall) -> None:
"""Play a ringtone."""
service_data = service_call.data
device_registry = dr.async_get(hass)
for device_id in service_data[ATTR_DEVICE_ID]:
config_entry = None
device = device_registry.async_get(device_id)
if device is not None:
for entry_id in device.config_entries:
config_entry = hass.config_entries.async_get_entry(entry_id)
if config_entry is not None and config_entry.domain == DOMAIN:
break
if (
config_entry is None
or device is None
or config_entry.state != ConfigEntryState.LOADED
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_entry_ex",
translation_placeholders={"service_name": "play_chime"},
)
host: ReolinkHost = config_entry.runtime_data.host
(device_uid, chime_id, is_chime) = get_device_uid_and_ch(device, host)
chime: Chime | None = host.api.chime(chime_id)
if not is_chime or chime is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_not_chime",
translation_placeholders={"device_name": str(device.name)},
)
ringtone = service_data[ATTR_RINGTONE]
await chime.play(ChimeToneEnum[ringtone].value)
hass.services.async_register(
DOMAIN,
"play_chime",
async_play_chime,
_async_play_chime,
schema=vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): list[str],

View File

@@ -532,6 +532,9 @@
"floodlight_brightness": {
"name": "Floodlight turn on brightness"
},
"ir_brightness": {
"name": "Infrared light brightness"
},
"volume": {
"name": "Volume"
},
@@ -568,6 +571,9 @@
"ai_animal_sensitivity": {
"name": "AI animal sensitivity"
},
"cry_sensitivity": {
"name": "Baby cry sensitivity"
},
"crossline_sensitivity": {
"name": "AI crossline {zone_name} sensitivity"
},

View File

@@ -10,7 +10,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["rokuecp"],
"requirements": ["rokuecp==0.19.3"],
"requirements": ["rokuecp==0.19.5"],
"ssdp": [
{
"st": "roku:ecp",

View File

@@ -76,10 +76,10 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity)
def _wake_on_lan(self) -> None:
"""Wake the device via wake on lan."""
send_magic_packet(self._mac, ip_address=self._host)
send_magic_packet(self._mac, ip_address=self._host) # type: ignore[arg-type]
# If the ip address changed since we last saw the device
# broadcast a packet as well
send_magic_packet(self._mac)
send_magic_packet(self._mac) # type: ignore[arg-type]
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the device off."""

View File

@@ -38,7 +38,7 @@
"getmac==0.9.5",
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.7.2",
"wakeonlan==2.1.0",
"wakeonlan==3.1.0",
"async-upnp-client==0.44.0"
],
"ssdp": [

View File

@@ -235,11 +235,15 @@ class ShellyButton(ShellyBaseButton):
self._attr_unique_id = f"{coordinator.mac}_{description.key}"
if isinstance(coordinator, ShellyBlockCoordinator):
self._attr_device_info = get_block_device_info(
coordinator.device, coordinator.mac
coordinator.device,
coordinator.mac,
suggested_area=coordinator.suggested_area,
)
else:
self._attr_device_info = get_rpc_device_info(
coordinator.device, coordinator.mac
coordinator.device,
coordinator.mac,
suggested_area=coordinator.suggested_area,
)
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, coordinator.mac)}

View File

@@ -211,7 +211,10 @@ class BlockSleepingClimate(
elif entry is not None:
self._unique_id = entry.unique_id
self._attr_device_info = get_block_device_info(
coordinator.device, coordinator.mac, sensor_block
coordinator.device,
coordinator.mac,
sensor_block,
suggested_area=coordinator.suggested_area,
)
self._attr_name = get_block_entity_name(
self.coordinator.device, sensor_block, None

View File

@@ -31,7 +31,11 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
issue_registry as ir,
)
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -114,6 +118,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice](
self.device = device
self.device_id: str | None = None
self._pending_platforms: list[Platform] | None = None
self.suggested_area: str | None = None
device_name = device.name if device.initialized else entry.title
interval_td = timedelta(seconds=update_interval)
# The device has come online at least once. In the case of a sleeping RPC
@@ -176,6 +181,11 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice](
hw_version=f"gen{get_device_entry_gen(self.config_entry)}",
configuration_url=f"http://{get_host(self.config_entry.data[CONF_HOST])}:{get_http_port(self.config_entry.data)}",
)
# We want to use the main device area as the suggested area for sub-devices.
if (area_id := device_entry.area_id) is not None:
area_registry = ar.async_get(self.hass)
if (area := area_registry.async_get_area(area_id)) is not None:
self.suggested_area = area.name
self.device_id = device_entry.id
async def shutdown(self) -> None:
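
The coordinator hunk above stores the main Shelly device's area name once the main device entry is registered, so it can later be passed as suggested_area for sub-devices. A small sketch of that lookup in isolation, assuming an already registered parent device entry (the helper name is hypothetical):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import area_registry as ar, device_registry as dr


def _main_device_area(hass: HomeAssistant, device_entry: dr.DeviceEntry) -> str | None:
    """Return the area name of the main device, if one is assigned."""
    if device_entry.area_id is None:
        return None
    area = ar.async_get(hass).async_get_area(device_entry.area_id)
    return area.name if area is not None else None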

View File

@@ -362,7 +362,10 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]):
self.block = block
self._attr_name = get_block_entity_name(coordinator.device, block)
self._attr_device_info = get_block_device_info(
coordinator.device, coordinator.mac, block
coordinator.device,
coordinator.mac,
block,
suggested_area=coordinator.suggested_area,
)
self._attr_unique_id = f"{coordinator.mac}-{block.description}"
@@ -405,7 +408,10 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]):
super().__init__(coordinator)
self.key = key
self._attr_device_info = get_rpc_device_info(
coordinator.device, coordinator.mac, key
coordinator.device,
coordinator.mac,
key,
suggested_area=coordinator.suggested_area,
)
self._attr_unique_id = f"{coordinator.mac}-{key}"
self._attr_name = get_rpc_entity_name(coordinator.device, key)
@@ -521,7 +527,9 @@ class ShellyRestAttributeEntity(CoordinatorEntity[ShellyBlockCoordinator]):
)
self._attr_unique_id = f"{coordinator.mac}-{attribute}"
self._attr_device_info = get_block_device_info(
coordinator.device, coordinator.mac
coordinator.device,
coordinator.mac,
suggested_area=coordinator.suggested_area,
)
self._last_value = None
@@ -630,7 +638,10 @@ class ShellySleepingBlockAttributeEntity(ShellyBlockAttributeEntity):
self.entity_description = description
self._attr_device_info = get_block_device_info(
coordinator.device, coordinator.mac, block
coordinator.device,
coordinator.mac,
block,
suggested_area=coordinator.suggested_area,
)
if block is not None:
@@ -698,7 +709,10 @@ class ShellySleepingRpcAttributeEntity(ShellyRpcAttributeEntity):
self.entity_description = description
self._attr_device_info = get_rpc_device_info(
coordinator.device, coordinator.mac, key
coordinator.device,
coordinator.mac,
key,
suggested_area=coordinator.suggested_area,
)
self._attr_unique_id = self._attr_unique_id = (
f"{coordinator.mac}-{key}-{attribute}"

View File

@@ -207,7 +207,10 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
super().__init__(coordinator)
self.event_id = int(key.split(":")[-1])
self._attr_device_info = get_rpc_device_info(
coordinator.device, coordinator.mac, key
coordinator.device,
coordinator.mac,
key,
suggested_area=coordinator.suggested_area,
)
self._attr_unique_id = f"{coordinator.mac}-{key}"
self._attr_name = get_rpc_entity_name(coordinator.device, key)

View File

@@ -139,7 +139,11 @@ class RpcEmeterPhaseSensor(RpcSensor):
super().__init__(coordinator, key, attribute, description)
self._attr_device_info = get_rpc_device_info(
coordinator.device, coordinator.mac, key, description.emeter_phase
coordinator.device,
coordinator.mac,
key,
emeter_phase=description.emeter_phase,
suggested_area=coordinator.suggested_area,
)

View File

@@ -751,6 +751,7 @@ def get_rpc_device_info(
mac: str,
key: str | None = None,
emeter_phase: str | None = None,
suggested_area: str | None = None,
) -> DeviceInfo:
"""Return device info for RPC device."""
if key is None:
@@ -770,6 +771,7 @@ def get_rpc_device_info(
identifiers={(DOMAIN, f"{mac}-{key}-{emeter_phase.lower()}")},
name=get_rpc_sub_device_name(device, key, emeter_phase),
manufacturer="Shelly",
suggested_area=suggested_area,
via_device=(DOMAIN, mac),
)
@@ -784,6 +786,7 @@ def get_rpc_device_info(
identifiers={(DOMAIN, f"{mac}-{key}")},
name=get_rpc_sub_device_name(device, key),
manufacturer="Shelly",
suggested_area=suggested_area,
via_device=(DOMAIN, mac),
)
@@ -805,7 +808,10 @@ def get_blu_trv_device_info(
def get_block_device_info(
device: BlockDevice, mac: str, block: Block | None = None
device: BlockDevice,
mac: str,
block: Block | None = None,
suggested_area: str | None = None,
) -> DeviceInfo:
"""Return device info for Block device."""
if (
@@ -820,6 +826,7 @@ def get_block_device_info(
identifiers={(DOMAIN, f"{mac}-{block.description}")},
name=get_block_sub_device_name(device, block),
manufacturer="Shelly",
suggested_area=suggested_area,
via_device=(DOMAIN, mac),
)

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.1.1"]
"requirements": ["tesla-fleet-api==1.1.3"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/teslemetry",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.1.1", "teslemetry-stream==0.7.9"]
"requirements": ["tesla-fleet-api==1.1.3", "teslemetry-stream==0.7.9"]
}

View File

@@ -7,7 +7,7 @@ from voluptuous import All, Range
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE_ID, CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
@@ -98,6 +98,7 @@ def async_get_energy_site_for_entry(
return energy_data
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the Teslemetry services."""

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tessie",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.1.1"]
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.1.3"]
}

View File

@@ -303,6 +303,7 @@ SERVICES = [
]
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the global UniFi Protect services."""

View File

@@ -52,7 +52,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
await hass.async_add_executor_job(
partial(wakeonlan.send_magic_packet, mac_address, **service_kwargs)
partial(wakeonlan.send_magic_packet, mac_address, **service_kwargs) # type: ignore[arg-type]
)
hass.services.async_register(

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/wake_on_lan",
"iot_class": "local_push",
"requirements": ["wakeonlan==2.1.0"]
"requirements": ["wakeonlan==3.1.0"]
}
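
The wake_on_lan hunks above track the wakeonlan 2.1.0 to 3.1.0 bump; the library's stricter typing is papered over with # type: ignore[arg-type] while the call pattern stays the same: the blocking send_magic_packet runs in the executor. A minimal sketch of that pattern (the helper name and keyword arguments are illustrative):

from functools import partial

import wakeonlan

from homeassistant.core import HomeAssistant


async def _async_send_magic_packet(
    hass: HomeAssistant, mac_address: str, **kwargs
) -> None:
    """Send a WOL magic packet without blocking the event loop."""
    await hass.async_add_executor_job(
        partial(wakeonlan.send_magic_packet, mac_address, **kwargs)
    )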

View File

@@ -4,7 +4,7 @@ import voluptuous as vol
from yolink.client_request import ClientRequest
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
@@ -25,6 +25,7 @@ _SPEAKER_HUB_PLAY_CALL_OPTIONAL_ATTRS = (
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for YoLink integration."""

View File

@@ -58,6 +58,7 @@ TARGET_VALIDATORS = {
}
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register integration services."""
services = ZWaveServices(hass, er.async_get(hass), dr.async_get(hass))

View File

@@ -40,6 +40,7 @@ PLATFORM_FORMAT: Final = "{platform}.{domain}"
class Platform(StrEnum):
"""Available entity platforms."""
AI_TASK = "ai_task"
AIR_QUALITY = "air_quality"
ALARM_CONTROL_PANEL = "alarm_control_panel"
ASSIST_SATELLITE = "assist_satellite"
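
The new Platform.AI_TASK member slots into the enum alphabetically; an integration providing entities for the new ai_task platform would presumably forward its config entry to it like any other platform. A hedged sketch under that assumption (the setup flow shown here is not part of this diff):

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

PLATFORMS: list[Platform] = [Platform.AI_TASK]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry and forward it to the ai_task entity platform."""
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True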

Some files were not shown because too many files have changed in this diff.