# Mirror of https://github.com/home-assistant/core.git — synced 2025-11-30 04:58:01 +00:00 (78 lines, 2.5 KiB, Python)
"""Conversation support for Home Assistant Cloud."""

from __future__ import annotations

from typing import Literal

from hass_nabucasa import NabuCasaBaseError
from hass_nabucasa.llm import LLMError

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import CONVERSATION_ENTITY_UNIQUE_ID, DATA_CLOUD, DOMAIN
from .entity import BaseCloudLLMEntity

async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Home Assistant Cloud conversation entity.

    The entity is only created for accounts that are logged in to
    Home Assistant Cloud and for which an LLM token can be obtained;
    otherwise setup returns without adding anything.
    """
    cloud = hass.data[DATA_CLOUD]

    # No cloud login — nothing to expose.
    if not cloud.is_logged_in:
        return

    # Best-effort token check: if the cloud LLM token cannot be ensured,
    # silently skip creating the entity rather than failing setup.
    try:
        await cloud.llm.async_ensure_token()
    except (LLMError, NabuCasaBaseError):
        return

    async_add_entities([CloudConversationEntity(cloud, config_entry)])
|
|
|
|
|
|
class CloudConversationEntity(
    conversation.ConversationEntity,
    BaseCloudLLMEntity,
):
    """Home Assistant Cloud conversation agent."""

    _attr_has_entity_name = True
    _attr_name = "Home Assistant Cloud"
    _attr_translation_key = "cloud_conversation"
    _attr_unique_id = CONVERSATION_ENTITY_UNIQUE_ID
    _attr_supported_features = conversation.ConversationEntityFeature.CONTROL

    @property
    def available(self) -> bool:
        """Return if the entity is available."""
        # Requires both an active cloud login and a valid subscription.
        cloud = self._cloud
        return cloud.is_logged_in and cloud.valid_subscription

    @property
    def supported_languages(self) -> list[str] | Literal["*"]:
        """Return a list of supported languages."""
        # The cloud agent handles any language.
        return MATCH_ALL

    async def _async_handle_message(
        self,
        user_input: conversation.ConversationInput,
        chat_log: conversation.ChatLog,
    ) -> conversation.ConversationResult:
        """Process a user input."""
        llm_context = user_input.as_llm_context(DOMAIN)

        # Attach the Assist LLM API and any extra system prompt to the chat
        # log; a ConverseError here is turned into a conversation result
        # instead of propagating.
        try:
            await chat_log.async_provide_llm_data(
                llm_context,
                llm.LLM_API_ASSIST,
                None,
                user_input.extra_system_prompt,
            )
        except conversation.ConverseError as err:
            return err.as_conversation_result()

        await self._async_handle_chat_log("conversation", chat_log)

        return conversation.async_get_result_from_chat_log(user_input, chat_log)
|