"""The conversation platform for the Ollama integration."""
|
|
|
|
from __future__ import annotations
|
|
|
|
from typing import Literal
|
|
|
|
from homeassistant.components import assist_pipeline, conversation
|
|
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
|
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
|
from homeassistant.core import HomeAssistant
|
|
from homeassistant.helpers import intent
|
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
|
|
|
from . import OllamaConfigEntry
|
|
from .const import CONF_PROMPT, DOMAIN
|
|
from .entity import OllamaBaseLLMEntity
|
|
|
|
|
|
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: OllamaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up conversation entities."""
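    # A config entry may hold several subentries; each "conversation"
    # subentry is exposed as its own conversation entity.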
    for subentry in config_entry.subentries.values():
        if subentry.subentry_type != "conversation":
            continue

        async_add_entities(
            [OllamaConversationEntity(config_entry, subentry)],
            config_subentry_id=subentry.subentry_id,
        )


class OllamaConversationEntity(
    conversation.ConversationEntity,
    conversation.AbstractConversationAgent,
    OllamaBaseLLMEntity,
):
    """Ollama conversation agent."""

    _attr_supports_streaming = True

    def __init__(self, entry: OllamaConfigEntry, subentry: ConfigSubentry) -> None:
        """Initialize the agent."""
        super().__init__(entry, subentry)
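        # Only advertise the CONTROL feature (letting the model call Home
        # Assistant LLM tools) when an LLM API is selected for this subentry.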
        if self.subentry.data.get(CONF_LLM_HASS_API):
            self._attr_supported_features = (
                conversation.ConversationEntityFeature.CONTROL
            )

    async def async_added_to_hass(self) -> None:
        """When entity is added to Home Assistant."""
        await super().async_added_to_hass()
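        # Point assist pipelines that still reference the config entry ID at
        # this entity, then register it as the active conversation agent.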
        assist_pipeline.async_migrate_engine(
            self.hass, "conversation", self.entry.entry_id, self.entity_id
        )
        conversation.async_set_agent(self.hass, self.entry, self)
        self.entry.async_on_unload(
            self.entry.add_update_listener(self._async_entry_update_listener)
        )

    async def async_will_remove_from_hass(self) -> None:
        """When entity will be removed from Home Assistant."""
        conversation.async_unset_agent(self.hass, self.entry)
        await super().async_will_remove_from_hass()

    @property
    def supported_languages(self) -> list[str] | Literal["*"]:
        """Return a list of supported languages."""
        return MATCH_ALL

    async def _async_handle_message(
        self,
        user_input: conversation.ConversationInput,
        chat_log: conversation.ChatLog,
    ) -> conversation.ConversationResult:
        """Call the API."""
        settings = {**self.entry.data, **self.subentry.data}

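        # Prime the chat log with the selected LLM API, the configured prompt,
        # and any extra system prompt before handing it to the model.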
        try:
            await chat_log.async_provide_llm_data(
                user_input.as_llm_context(DOMAIN),
                settings.get(CONF_LLM_HASS_API),
                settings.get(CONF_PROMPT),
                user_input.extra_system_prompt,
            )
        except conversation.ConverseError as err:
            return err.as_conversation_result()

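        # Delegate the actual Ollama request to the shared base entity, which
        # appends the model's response to the chat log.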
        await self._async_handle_chat_log(chat_log)

        # Create intent response
        intent_response = intent.IntentResponse(language=user_input.language)
        if not isinstance(chat_log.content[-1], conversation.AssistantContent):
            raise TypeError(
                f"Unexpected last message type: {type(chat_log.content[-1])}"
            )
        intent_response.async_set_speech(chat_log.content[-1].content or "")
        return conversation.ConversationResult(
            response=intent_response,
            conversation_id=chat_log.conversation_id,
            continue_conversation=chat_log.continue_conversation,
        )

    async def _async_entry_update_listener(
        self, hass: HomeAssistant, entry: ConfigEntry
    ) -> None:
        """Handle options update."""
        # Reload as we update device info + entity name + supported features
        await hass.config_entries.async_reload(entry.entry_id)