mirror of
https://github.com/home-assistant/core.git
synced 2025-07-19 11:17:21 +00:00
Migrate OpenAI to config subentries (#147282)
* Migrate OpenAI to config subentries * Add latest changes from Google subentries * Update homeassistant/components/openai_conversation/__init__.py Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com> --------- Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
This commit is contained in:
parent
4d9843172b
commit
8eb906fad9
@ -19,7 +19,7 @@ from openai.types.responses import (
|
|||||||
)
|
)
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigEntry
|
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||||
from homeassistant.const import CONF_API_KEY, Platform
|
from homeassistant.const import CONF_API_KEY, Platform
|
||||||
from homeassistant.core import (
|
from homeassistant.core import (
|
||||||
HomeAssistant,
|
HomeAssistant,
|
||||||
@ -32,7 +32,12 @@ from homeassistant.exceptions import (
|
|||||||
HomeAssistantError,
|
HomeAssistantError,
|
||||||
ServiceValidationError,
|
ServiceValidationError,
|
||||||
)
|
)
|
||||||
from homeassistant.helpers import config_validation as cv, selector
|
from homeassistant.helpers import (
|
||||||
|
config_validation as cv,
|
||||||
|
device_registry as dr,
|
||||||
|
entity_registry as er,
|
||||||
|
selector,
|
||||||
|
)
|
||||||
from homeassistant.helpers.httpx_client import get_async_client
|
from homeassistant.helpers.httpx_client import get_async_client
|
||||||
from homeassistant.helpers.typing import ConfigType
|
from homeassistant.helpers.typing import ConfigType
|
||||||
|
|
||||||
@ -73,6 +78,7 @@ def encode_file(file_path: str) -> tuple[str, str]:
|
|||||||
|
|
||||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||||
"""Set up OpenAI Conversation."""
|
"""Set up OpenAI Conversation."""
|
||||||
|
await async_migrate_integration(hass)
|
||||||
|
|
||||||
async def render_image(call: ServiceCall) -> ServiceResponse:
|
async def render_image(call: ServiceCall) -> ServiceResponse:
|
||||||
"""Render an image with dall-e."""
|
"""Render an image with dall-e."""
|
||||||
@ -118,7 +124,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
translation_placeholders={"config_entry": entry_id},
|
translation_placeholders={"config_entry": entry_id},
|
||||||
)
|
)
|
||||||
|
|
||||||
model: str = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
# Get first conversation subentry for options
|
||||||
|
conversation_subentry = next(
|
||||||
|
(
|
||||||
|
sub
|
||||||
|
for sub in entry.subentries.values()
|
||||||
|
if sub.subentry_type == "conversation"
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
if not conversation_subentry:
|
||||||
|
raise ServiceValidationError("No conversation configuration found")
|
||||||
|
|
||||||
|
model: str = conversation_subentry.data.get(
|
||||||
|
CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL
|
||||||
|
)
|
||||||
client: openai.AsyncClient = entry.runtime_data
|
client: openai.AsyncClient = entry.runtime_data
|
||||||
|
|
||||||
content: ResponseInputMessageContentListParam = [
|
content: ResponseInputMessageContentListParam = [
|
||||||
@ -169,11 +189,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
model_args = {
|
model_args = {
|
||||||
"model": model,
|
"model": model,
|
||||||
"input": messages,
|
"input": messages,
|
||||||
"max_output_tokens": entry.options.get(
|
"max_output_tokens": conversation_subentry.data.get(
|
||||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||||
),
|
),
|
||||||
"top_p": entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
"top_p": conversation_subentry.data.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||||
"temperature": entry.options.get(
|
"temperature": conversation_subentry.data.get(
|
||||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||||
),
|
),
|
||||||
"user": call.context.user_id,
|
"user": call.context.user_id,
|
||||||
@ -182,7 +202,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
|
|
||||||
if model.startswith("o"):
|
if model.startswith("o"):
|
||||||
model_args["reasoning"] = {
|
model_args["reasoning"] = {
|
||||||
"effort": entry.options.get(
|
"effort": conversation_subentry.data.get(
|
||||||
CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
|
CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@ -269,3 +289,68 @@ async def async_setup_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bo
|
|||||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||||
"""Unload OpenAI."""
|
"""Unload OpenAI."""
|
||||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||||
|
|
||||||
|
|
||||||
|
async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||||
|
"""Migrate integration entry structure."""
|
||||||
|
|
||||||
|
entries = hass.config_entries.async_entries(DOMAIN)
|
||||||
|
if not any(entry.version == 1 for entry in entries):
|
||||||
|
return
|
||||||
|
|
||||||
|
api_keys_entries: dict[str, ConfigEntry] = {}
|
||||||
|
entity_registry = er.async_get(hass)
|
||||||
|
device_registry = dr.async_get(hass)
|
||||||
|
|
||||||
|
for entry in entries:
|
||||||
|
use_existing = False
|
||||||
|
subentry = ConfigSubentry(
|
||||||
|
data=entry.options,
|
||||||
|
subentry_type="conversation",
|
||||||
|
title=entry.title,
|
||||||
|
unique_id=None,
|
||||||
|
)
|
||||||
|
if entry.data[CONF_API_KEY] not in api_keys_entries:
|
||||||
|
use_existing = True
|
||||||
|
api_keys_entries[entry.data[CONF_API_KEY]] = entry
|
||||||
|
|
||||||
|
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
|
||||||
|
|
||||||
|
hass.config_entries.async_add_subentry(parent_entry, subentry)
|
||||||
|
conversation_entity = entity_registry.async_get_entity_id(
|
||||||
|
"conversation",
|
||||||
|
DOMAIN,
|
||||||
|
entry.entry_id,
|
||||||
|
)
|
||||||
|
if conversation_entity is not None:
|
||||||
|
entity_registry.async_update_entity(
|
||||||
|
conversation_entity,
|
||||||
|
config_entry_id=parent_entry.entry_id,
|
||||||
|
config_subentry_id=subentry.subentry_id,
|
||||||
|
new_unique_id=subentry.subentry_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
device = device_registry.async_get_device(
|
||||||
|
identifiers={(DOMAIN, entry.entry_id)}
|
||||||
|
)
|
||||||
|
if device is not None:
|
||||||
|
device_registry.async_update_device(
|
||||||
|
device.id,
|
||||||
|
new_identifiers={(DOMAIN, subentry.subentry_id)},
|
||||||
|
add_config_subentry_id=subentry.subentry_id,
|
||||||
|
add_config_entry_id=parent_entry.entry_id,
|
||||||
|
)
|
||||||
|
if parent_entry.entry_id != entry.entry_id:
|
||||||
|
device_registry.async_update_device(
|
||||||
|
device.id,
|
||||||
|
remove_config_entry_id=entry.entry_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not use_existing:
|
||||||
|
await hass.config_entries.async_remove(entry.entry_id)
|
||||||
|
else:
|
||||||
|
hass.config_entries.async_update_entry(
|
||||||
|
entry,
|
||||||
|
options={},
|
||||||
|
version=2,
|
||||||
|
)
|
||||||
|
@ -13,17 +13,20 @@ from voluptuous_openapi import convert
|
|||||||
from homeassistant.components.zone import ENTITY_ID_HOME
|
from homeassistant.components.zone import ENTITY_ID_HOME
|
||||||
from homeassistant.config_entries import (
|
from homeassistant.config_entries import (
|
||||||
ConfigEntry,
|
ConfigEntry,
|
||||||
|
ConfigEntryState,
|
||||||
ConfigFlow,
|
ConfigFlow,
|
||||||
ConfigFlowResult,
|
ConfigFlowResult,
|
||||||
OptionsFlow,
|
ConfigSubentryFlow,
|
||||||
|
SubentryFlowResult,
|
||||||
)
|
)
|
||||||
from homeassistant.const import (
|
from homeassistant.const import (
|
||||||
ATTR_LATITUDE,
|
ATTR_LATITUDE,
|
||||||
ATTR_LONGITUDE,
|
ATTR_LONGITUDE,
|
||||||
CONF_API_KEY,
|
CONF_API_KEY,
|
||||||
CONF_LLM_HASS_API,
|
CONF_LLM_HASS_API,
|
||||||
|
CONF_NAME,
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant, callback
|
||||||
from homeassistant.helpers import llm
|
from homeassistant.helpers import llm
|
||||||
from homeassistant.helpers.httpx_client import get_async_client
|
from homeassistant.helpers.httpx_client import get_async_client
|
||||||
from homeassistant.helpers.selector import (
|
from homeassistant.helpers.selector import (
|
||||||
@ -52,6 +55,7 @@ from .const import (
|
|||||||
CONF_WEB_SEARCH_REGION,
|
CONF_WEB_SEARCH_REGION,
|
||||||
CONF_WEB_SEARCH_TIMEZONE,
|
CONF_WEB_SEARCH_TIMEZONE,
|
||||||
CONF_WEB_SEARCH_USER_LOCATION,
|
CONF_WEB_SEARCH_USER_LOCATION,
|
||||||
|
DEFAULT_CONVERSATION_NAME,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
RECOMMENDED_CHAT_MODEL,
|
RECOMMENDED_CHAT_MODEL,
|
||||||
RECOMMENDED_MAX_TOKENS,
|
RECOMMENDED_MAX_TOKENS,
|
||||||
@ -94,7 +98,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
|||||||
class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||||
"""Handle a config flow for OpenAI Conversation."""
|
"""Handle a config flow for OpenAI Conversation."""
|
||||||
|
|
||||||
VERSION = 1
|
VERSION = 2
|
||||||
|
|
||||||
async def async_step_user(
|
async def async_step_user(
|
||||||
self, user_input: dict[str, Any] | None = None
|
self, user_input: dict[str, Any] | None = None
|
||||||
@ -107,6 +111,7 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
|
|
||||||
errors: dict[str, str] = {}
|
errors: dict[str, str] = {}
|
||||||
|
|
||||||
|
self._async_abort_entries_match(user_input)
|
||||||
try:
|
try:
|
||||||
await validate_input(self.hass, user_input)
|
await validate_input(self.hass, user_input)
|
||||||
except openai.APIConnectionError:
|
except openai.APIConnectionError:
|
||||||
@ -120,32 +125,61 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
return self.async_create_entry(
|
return self.async_create_entry(
|
||||||
title="ChatGPT",
|
title="ChatGPT",
|
||||||
data=user_input,
|
data=user_input,
|
||||||
options=RECOMMENDED_OPTIONS,
|
subentries=[
|
||||||
|
{
|
||||||
|
"subentry_type": "conversation",
|
||||||
|
"data": RECOMMENDED_OPTIONS,
|
||||||
|
"title": DEFAULT_CONVERSATION_NAME,
|
||||||
|
"unique_id": None,
|
||||||
|
}
|
||||||
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@classmethod
|
||||||
def async_get_options_flow(
|
@callback
|
||||||
config_entry: ConfigEntry,
|
def async_get_supported_subentry_types(
|
||||||
) -> OptionsFlow:
|
cls, config_entry: ConfigEntry
|
||||||
"""Create the options flow."""
|
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||||
return OpenAIOptionsFlow(config_entry)
|
"""Return subentries supported by this integration."""
|
||||||
|
return {"conversation": ConversationSubentryFlowHandler}
|
||||||
|
|
||||||
|
|
||||||
class OpenAIOptionsFlow(OptionsFlow):
|
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||||
"""OpenAI config flow options handler."""
|
"""Flow for managing conversation subentries."""
|
||||||
|
|
||||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
last_rendered_recommended = False
|
||||||
"""Initialize options flow."""
|
options: dict[str, Any]
|
||||||
self.options = config_entry.options.copy()
|
|
||||||
|
@property
|
||||||
|
def _is_new(self) -> bool:
|
||||||
|
"""Return if this is a new subentry."""
|
||||||
|
return self.source == "user"
|
||||||
|
|
||||||
|
async def async_step_user(
|
||||||
|
self, user_input: dict[str, Any] | None = None
|
||||||
|
) -> SubentryFlowResult:
|
||||||
|
"""Add a subentry."""
|
||||||
|
self.options = RECOMMENDED_OPTIONS.copy()
|
||||||
|
return await self.async_step_init()
|
||||||
|
|
||||||
|
async def async_step_reconfigure(
|
||||||
|
self, user_input: dict[str, Any] | None = None
|
||||||
|
) -> SubentryFlowResult:
|
||||||
|
"""Handle reconfiguration of a subentry."""
|
||||||
|
self.options = self._get_reconfigure_subentry().data.copy()
|
||||||
|
return await self.async_step_init()
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
self, user_input: dict[str, Any] | None = None
|
self, user_input: dict[str, Any] | None = None
|
||||||
) -> ConfigFlowResult:
|
) -> SubentryFlowResult:
|
||||||
"""Manage initial options."""
|
"""Manage initial options."""
|
||||||
|
# abort if entry is not loaded
|
||||||
|
if self._get_entry().state != ConfigEntryState.LOADED:
|
||||||
|
return self.async_abort(reason="entry_not_loaded")
|
||||||
options = self.options
|
options = self.options
|
||||||
|
|
||||||
hass_apis: list[SelectOptionDict] = [
|
hass_apis: list[SelectOptionDict] = [
|
||||||
@ -160,25 +194,47 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
):
|
):
|
||||||
options[CONF_LLM_HASS_API] = [suggested_llm_apis]
|
options[CONF_LLM_HASS_API] = [suggested_llm_apis]
|
||||||
|
|
||||||
step_schema: VolDictType = {
|
step_schema: VolDictType = {}
|
||||||
vol.Optional(
|
|
||||||
CONF_PROMPT,
|
if self._is_new:
|
||||||
description={"suggested_value": llm.DEFAULT_INSTRUCTIONS_PROMPT},
|
step_schema[vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME)] = (
|
||||||
): TemplateSelector(),
|
str
|
||||||
vol.Optional(CONF_LLM_HASS_API): SelectSelector(
|
)
|
||||||
SelectSelectorConfig(options=hass_apis, multiple=True)
|
|
||||||
),
|
step_schema.update(
|
||||||
vol.Required(
|
{
|
||||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
vol.Optional(
|
||||||
): bool,
|
CONF_PROMPT,
|
||||||
}
|
description={
|
||||||
|
"suggested_value": options.get(
|
||||||
|
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||||
|
)
|
||||||
|
},
|
||||||
|
): TemplateSelector(),
|
||||||
|
vol.Optional(CONF_LLM_HASS_API): SelectSelector(
|
||||||
|
SelectSelectorConfig(options=hass_apis, multiple=True)
|
||||||
|
),
|
||||||
|
vol.Required(
|
||||||
|
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||||
|
): bool,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
if not user_input.get(CONF_LLM_HASS_API):
|
if not user_input.get(CONF_LLM_HASS_API):
|
||||||
user_input.pop(CONF_LLM_HASS_API, None)
|
user_input.pop(CONF_LLM_HASS_API, None)
|
||||||
|
|
||||||
if user_input[CONF_RECOMMENDED]:
|
if user_input[CONF_RECOMMENDED]:
|
||||||
return self.async_create_entry(title="", data=user_input)
|
if self._is_new:
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=user_input.pop(CONF_NAME),
|
||||||
|
data=user_input,
|
||||||
|
)
|
||||||
|
return self.async_update_and_abort(
|
||||||
|
self._get_entry(),
|
||||||
|
self._get_reconfigure_subentry(),
|
||||||
|
data=user_input,
|
||||||
|
)
|
||||||
|
|
||||||
options.update(user_input)
|
options.update(user_input)
|
||||||
if CONF_LLM_HASS_API in options and CONF_LLM_HASS_API not in user_input:
|
if CONF_LLM_HASS_API in options and CONF_LLM_HASS_API not in user_input:
|
||||||
@ -194,7 +250,7 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
|
|
||||||
async def async_step_advanced(
|
async def async_step_advanced(
|
||||||
self, user_input: dict[str, Any] | None = None
|
self, user_input: dict[str, Any] | None = None
|
||||||
) -> ConfigFlowResult:
|
) -> SubentryFlowResult:
|
||||||
"""Manage advanced options."""
|
"""Manage advanced options."""
|
||||||
options = self.options
|
options = self.options
|
||||||
errors: dict[str, str] = {}
|
errors: dict[str, str] = {}
|
||||||
@ -236,7 +292,7 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
|
|
||||||
async def async_step_model(
|
async def async_step_model(
|
||||||
self, user_input: dict[str, Any] | None = None
|
self, user_input: dict[str, Any] | None = None
|
||||||
) -> ConfigFlowResult:
|
) -> SubentryFlowResult:
|
||||||
"""Manage model-specific options."""
|
"""Manage model-specific options."""
|
||||||
options = self.options
|
options = self.options
|
||||||
errors: dict[str, str] = {}
|
errors: dict[str, str] = {}
|
||||||
@ -303,7 +359,16 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
}
|
}
|
||||||
|
|
||||||
if not step_schema:
|
if not step_schema:
|
||||||
return self.async_create_entry(title="", data=options)
|
if self._is_new:
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=options.pop(CONF_NAME, DEFAULT_CONVERSATION_NAME),
|
||||||
|
data=options,
|
||||||
|
)
|
||||||
|
return self.async_update_and_abort(
|
||||||
|
self._get_entry(),
|
||||||
|
self._get_reconfigure_subentry(),
|
||||||
|
data=options,
|
||||||
|
)
|
||||||
|
|
||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
if user_input.get(CONF_WEB_SEARCH):
|
if user_input.get(CONF_WEB_SEARCH):
|
||||||
@ -316,7 +381,16 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
|
options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
|
||||||
|
|
||||||
options.update(user_input)
|
options.update(user_input)
|
||||||
return self.async_create_entry(title="", data=options)
|
if self._is_new:
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=options.pop(CONF_NAME, DEFAULT_CONVERSATION_NAME),
|
||||||
|
data=options,
|
||||||
|
)
|
||||||
|
return self.async_update_and_abort(
|
||||||
|
self._get_entry(),
|
||||||
|
self._get_reconfigure_subentry(),
|
||||||
|
data=options,
|
||||||
|
)
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="model",
|
step_id="model",
|
||||||
@ -332,7 +406,7 @@ class OpenAIOptionsFlow(OptionsFlow):
|
|||||||
zone_home = self.hass.states.get(ENTITY_ID_HOME)
|
zone_home = self.hass.states.get(ENTITY_ID_HOME)
|
||||||
if zone_home is not None:
|
if zone_home is not None:
|
||||||
client = openai.AsyncOpenAI(
|
client = openai.AsyncOpenAI(
|
||||||
api_key=self.config_entry.data[CONF_API_KEY],
|
api_key=self._get_entry().data[CONF_API_KEY],
|
||||||
http_client=get_async_client(self.hass),
|
http_client=get_async_client(self.hass),
|
||||||
)
|
)
|
||||||
location_schema = vol.Schema(
|
location_schema = vol.Schema(
|
||||||
|
@ -5,6 +5,8 @@ import logging
|
|||||||
DOMAIN = "openai_conversation"
|
DOMAIN = "openai_conversation"
|
||||||
LOGGER: logging.Logger = logging.getLogger(__package__)
|
LOGGER: logging.Logger = logging.getLogger(__package__)
|
||||||
|
|
||||||
|
DEFAULT_CONVERSATION_NAME = "OpenAI Conversation"
|
||||||
|
|
||||||
CONF_CHAT_MODEL = "chat_model"
|
CONF_CHAT_MODEL = "chat_model"
|
||||||
CONF_FILENAMES = "filenames"
|
CONF_FILENAMES = "filenames"
|
||||||
CONF_MAX_TOKENS = "max_tokens"
|
CONF_MAX_TOKENS = "max_tokens"
|
||||||
|
@ -34,7 +34,7 @@ from openai.types.responses.web_search_tool_param import UserLocation
|
|||||||
from voluptuous_openapi import convert
|
from voluptuous_openapi import convert
|
||||||
|
|
||||||
from homeassistant.components import assist_pipeline, conversation
|
from homeassistant.components import assist_pipeline, conversation
|
||||||
from homeassistant.config_entries import ConfigEntry
|
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
@ -76,8 +76,14 @@ async def async_setup_entry(
|
|||||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Set up conversation entities."""
|
"""Set up conversation entities."""
|
||||||
agent = OpenAIConversationEntity(config_entry)
|
for subentry in config_entry.subentries.values():
|
||||||
async_add_entities([agent])
|
if subentry.subentry_type != "conversation":
|
||||||
|
continue
|
||||||
|
|
||||||
|
async_add_entities(
|
||||||
|
[OpenAIConversationEntity(config_entry, subentry)],
|
||||||
|
config_subentry_id=subentry.subentry_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _format_tool(
|
def _format_tool(
|
||||||
@ -229,22 +235,22 @@ class OpenAIConversationEntity(
|
|||||||
):
|
):
|
||||||
"""OpenAI conversation agent."""
|
"""OpenAI conversation agent."""
|
||||||
|
|
||||||
_attr_has_entity_name = True
|
|
||||||
_attr_name = None
|
|
||||||
_attr_supports_streaming = True
|
_attr_supports_streaming = True
|
||||||
|
|
||||||
def __init__(self, entry: OpenAIConfigEntry) -> None:
|
def __init__(self, entry: OpenAIConfigEntry, subentry: ConfigSubentry) -> None:
|
||||||
"""Initialize the agent."""
|
"""Initialize the agent."""
|
||||||
self.entry = entry
|
self.entry = entry
|
||||||
self._attr_unique_id = entry.entry_id
|
self.subentry = subentry
|
||||||
|
self._attr_name = subentry.title
|
||||||
|
self._attr_unique_id = subentry.subentry_id
|
||||||
self._attr_device_info = dr.DeviceInfo(
|
self._attr_device_info = dr.DeviceInfo(
|
||||||
identifiers={(DOMAIN, entry.entry_id)},
|
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||||
name=entry.title,
|
name=subentry.title,
|
||||||
manufacturer="OpenAI",
|
manufacturer="OpenAI",
|
||||||
model="ChatGPT",
|
model=entry.data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
|
||||||
entry_type=dr.DeviceEntryType.SERVICE,
|
entry_type=dr.DeviceEntryType.SERVICE,
|
||||||
)
|
)
|
||||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
if self.subentry.data.get(CONF_LLM_HASS_API):
|
||||||
self._attr_supported_features = (
|
self._attr_supported_features = (
|
||||||
conversation.ConversationEntityFeature.CONTROL
|
conversation.ConversationEntityFeature.CONTROL
|
||||||
)
|
)
|
||||||
@ -276,7 +282,7 @@ class OpenAIConversationEntity(
|
|||||||
chat_log: conversation.ChatLog,
|
chat_log: conversation.ChatLog,
|
||||||
) -> conversation.ConversationResult:
|
) -> conversation.ConversationResult:
|
||||||
"""Process the user input and call the API."""
|
"""Process the user input and call the API."""
|
||||||
options = self.entry.options
|
options = self.subentry.data
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await chat_log.async_provide_llm_data(
|
await chat_log.async_provide_llm_data(
|
||||||
@ -304,7 +310,7 @@ class OpenAIConversationEntity(
|
|||||||
chat_log: conversation.ChatLog,
|
chat_log: conversation.ChatLog,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Generate an answer for the chat log."""
|
"""Generate an answer for the chat log."""
|
||||||
options = self.entry.options
|
options = self.subentry.data
|
||||||
|
|
||||||
tools: list[ToolParam] | None = None
|
tools: list[ToolParam] | None = None
|
||||||
if chat_log.llm_api:
|
if chat_log.llm_api:
|
||||||
|
@ -11,47 +11,63 @@
|
|||||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||||
|
},
|
||||||
|
"abort": {
|
||||||
|
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"options": {
|
"config_subentries": {
|
||||||
"step": {
|
"conversation": {
|
||||||
"init": {
|
"initiate_flow": {
|
||||||
"data": {
|
"user": "Add conversation agent",
|
||||||
"prompt": "Instructions",
|
"reconfigure": "Reconfigure conversation agent"
|
||||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
},
|
||||||
"recommended": "Recommended model settings"
|
"entry_type": "Conversation agent",
|
||||||
|
|
||||||
|
"step": {
|
||||||
|
"init": {
|
||||||
|
"data": {
|
||||||
|
"name": "[%key:common::config_flow::data::name%]",
|
||||||
|
"prompt": "Instructions",
|
||||||
|
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||||
|
"recommended": "Recommended model settings"
|
||||||
|
},
|
||||||
|
"data_description": {
|
||||||
|
"prompt": "Instruct how the LLM should respond. This can be a template."
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"data_description": {
|
"advanced": {
|
||||||
"prompt": "Instruct how the LLM should respond. This can be a template."
|
"title": "Advanced settings",
|
||||||
|
"data": {
|
||||||
|
"chat_model": "[%key:common::generic::model%]",
|
||||||
|
"max_tokens": "Maximum tokens to return in response",
|
||||||
|
"temperature": "Temperature",
|
||||||
|
"top_p": "Top P"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"model": {
|
||||||
|
"title": "Model-specific options",
|
||||||
|
"data": {
|
||||||
|
"reasoning_effort": "Reasoning effort",
|
||||||
|
"web_search": "Enable web search",
|
||||||
|
"search_context_size": "Search context size",
|
||||||
|
"user_location": "Include home location"
|
||||||
|
},
|
||||||
|
"data_description": {
|
||||||
|
"reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt",
|
||||||
|
"web_search": "Allow the model to search the web for the latest information before generating a response",
|
||||||
|
"search_context_size": "High level guidance for the amount of context window space to use for the search",
|
||||||
|
"user_location": "Refine search results based on geography"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"advanced": {
|
"abort": {
|
||||||
"title": "Advanced settings",
|
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||||
"data": {
|
"entry_not_loaded": "Cannot add things while the configuration is disabled."
|
||||||
"chat_model": "[%key:common::generic::model%]",
|
|
||||||
"max_tokens": "Maximum tokens to return in response",
|
|
||||||
"temperature": "Temperature",
|
|
||||||
"top_p": "Top P"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"model": {
|
"error": {
|
||||||
"title": "Model-specific options",
|
"model_not_supported": "This model is not supported, please select a different model"
|
||||||
"data": {
|
|
||||||
"reasoning_effort": "Reasoning effort",
|
|
||||||
"web_search": "Enable web search",
|
|
||||||
"search_context_size": "Search context size",
|
|
||||||
"user_location": "Include home location"
|
|
||||||
},
|
|
||||||
"data_description": {
|
|
||||||
"reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt",
|
|
||||||
"web_search": "Allow the model to search the web for the latest information before generating a response",
|
|
||||||
"search_context_size": "High level guidance for the amount of context window space to use for the search",
|
|
||||||
"user_location": "Refine search results based on geography"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"error": {
|
|
||||||
"model_not_supported": "This model is not supported, please select a different model"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"selector": {
|
"selector": {
|
||||||
|
@ -4,6 +4,7 @@ from unittest.mock import patch
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from homeassistant.components.openai_conversation.const import DEFAULT_CONVERSATION_NAME
|
||||||
from homeassistant.const import CONF_LLM_HASS_API
|
from homeassistant.const import CONF_LLM_HASS_API
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers import llm
|
from homeassistant.helpers import llm
|
||||||
@ -21,6 +22,15 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
|||||||
data={
|
data={
|
||||||
"api_key": "bla",
|
"api_key": "bla",
|
||||||
},
|
},
|
||||||
|
version=2,
|
||||||
|
subentries_data=[
|
||||||
|
{
|
||||||
|
"data": {},
|
||||||
|
"subentry_type": "conversation",
|
||||||
|
"title": DEFAULT_CONVERSATION_NAME,
|
||||||
|
"unique_id": None,
|
||||||
|
}
|
||||||
|
],
|
||||||
)
|
)
|
||||||
entry.add_to_hass(hass)
|
entry.add_to_hass(hass)
|
||||||
return entry
|
return entry
|
||||||
@ -31,8 +41,10 @@ def mock_config_entry_with_assist(
|
|||||||
hass: HomeAssistant, mock_config_entry: MockConfigEntry
|
hass: HomeAssistant, mock_config_entry: MockConfigEntry
|
||||||
) -> MockConfigEntry:
|
) -> MockConfigEntry:
|
||||||
"""Mock a config entry with assist."""
|
"""Mock a config entry with assist."""
|
||||||
hass.config_entries.async_update_entry(
|
hass.config_entries.async_update_subentry(
|
||||||
mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST}
|
mock_config_entry,
|
||||||
|
next(iter(mock_config_entry.subentries.values())),
|
||||||
|
data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
|
||||||
)
|
)
|
||||||
return mock_config_entry
|
return mock_config_entry
|
||||||
|
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
'role': 'user',
|
'role': 'user',
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'content': None,
|
'content': None,
|
||||||
'role': 'assistant',
|
'role': 'assistant',
|
||||||
'tool_calls': list([
|
'tool_calls': list([
|
||||||
@ -20,14 +20,14 @@
|
|||||||
]),
|
]),
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'role': 'tool_result',
|
'role': 'tool_result',
|
||||||
'tool_call_id': 'call_call_1',
|
'tool_call_id': 'call_call_1',
|
||||||
'tool_name': 'test_tool',
|
'tool_name': 'test_tool',
|
||||||
'tool_result': 'value1',
|
'tool_result': 'value1',
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'content': None,
|
'content': None,
|
||||||
'role': 'assistant',
|
'role': 'assistant',
|
||||||
'tool_calls': list([
|
'tool_calls': list([
|
||||||
@ -41,14 +41,14 @@
|
|||||||
]),
|
]),
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'role': 'tool_result',
|
'role': 'tool_result',
|
||||||
'tool_call_id': 'call_call_2',
|
'tool_call_id': 'call_call_2',
|
||||||
'tool_name': 'test_tool',
|
'tool_name': 'test_tool',
|
||||||
'tool_result': 'value2',
|
'tool_result': 'value2',
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'content': 'Cool',
|
'content': 'Cool',
|
||||||
'role': 'assistant',
|
'role': 'assistant',
|
||||||
'tool_calls': None,
|
'tool_calls': None,
|
||||||
@ -62,7 +62,7 @@
|
|||||||
'role': 'user',
|
'role': 'user',
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'content': None,
|
'content': None,
|
||||||
'role': 'assistant',
|
'role': 'assistant',
|
||||||
'tool_calls': list([
|
'tool_calls': list([
|
||||||
@ -76,14 +76,14 @@
|
|||||||
]),
|
]),
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'role': 'tool_result',
|
'role': 'tool_result',
|
||||||
'tool_call_id': 'call_call_1',
|
'tool_call_id': 'call_call_1',
|
||||||
'tool_name': 'test_tool',
|
'tool_name': 'test_tool',
|
||||||
'tool_result': 'value1',
|
'tool_result': 'value1',
|
||||||
}),
|
}),
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'conversation.openai',
|
'agent_id': 'conversation.openai_conversation',
|
||||||
'content': 'Cool',
|
'content': 'Cool',
|
||||||
'role': 'assistant',
|
'role': 'assistant',
|
||||||
'tool_calls': None,
|
'tool_calls': None,
|
||||||
|
@ -24,12 +24,13 @@ from homeassistant.components.openai_conversation.const import (
|
|||||||
CONF_WEB_SEARCH_REGION,
|
CONF_WEB_SEARCH_REGION,
|
||||||
CONF_WEB_SEARCH_TIMEZONE,
|
CONF_WEB_SEARCH_TIMEZONE,
|
||||||
CONF_WEB_SEARCH_USER_LOCATION,
|
CONF_WEB_SEARCH_USER_LOCATION,
|
||||||
|
DEFAULT_CONVERSATION_NAME,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
RECOMMENDED_CHAT_MODEL,
|
RECOMMENDED_CHAT_MODEL,
|
||||||
RECOMMENDED_MAX_TOKENS,
|
RECOMMENDED_MAX_TOKENS,
|
||||||
RECOMMENDED_TOP_P,
|
RECOMMENDED_TOP_P,
|
||||||
)
|
)
|
||||||
from homeassistant.const import CONF_LLM_HASS_API
|
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.data_entry_flow import FlowResultType
|
from homeassistant.data_entry_flow import FlowResultType
|
||||||
|
|
||||||
@ -72,42 +73,132 @@ async def test_form(hass: HomeAssistant) -> None:
|
|||||||
assert result2["data"] == {
|
assert result2["data"] == {
|
||||||
"api_key": "bla",
|
"api_key": "bla",
|
||||||
}
|
}
|
||||||
assert result2["options"] == RECOMMENDED_OPTIONS
|
assert result2["options"] == {}
|
||||||
|
assert result2["subentries"] == [
|
||||||
|
{
|
||||||
|
"subentry_type": "conversation",
|
||||||
|
"data": RECOMMENDED_OPTIONS,
|
||||||
|
"title": DEFAULT_CONVERSATION_NAME,
|
||||||
|
"unique_id": None,
|
||||||
|
}
|
||||||
|
]
|
||||||
assert len(mock_setup_entry.mock_calls) == 1
|
assert len(mock_setup_entry.mock_calls) == 1
|
||||||
|
|
||||||
|
|
||||||
async def test_options_recommended(
|
async def test_duplicate_entry(hass: HomeAssistant) -> None:
|
||||||
|
"""Test we abort on duplicate config entry."""
|
||||||
|
MockConfigEntry(
|
||||||
|
domain=DOMAIN,
|
||||||
|
data={CONF_API_KEY: "bla"},
|
||||||
|
).add_to_hass(hass)
|
||||||
|
|
||||||
|
result = await hass.config_entries.flow.async_init(
|
||||||
|
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||||
|
)
|
||||||
|
assert result["type"] is FlowResultType.FORM
|
||||||
|
assert not result["errors"]
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"homeassistant.components.openai_conversation.config_flow.openai.resources.models.AsyncModels.list",
|
||||||
|
):
|
||||||
|
result = await hass.config_entries.flow.async_configure(
|
||||||
|
result["flow_id"],
|
||||||
|
{
|
||||||
|
CONF_API_KEY: "bla",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["type"] is FlowResultType.ABORT
|
||||||
|
assert result["reason"] == "already_configured"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_creating_conversation_subentry(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_init_component: None,
|
||||||
|
mock_config_entry: MockConfigEntry,
|
||||||
|
) -> None:
|
||||||
|
"""Test creating a conversation subentry."""
|
||||||
|
mock_config_entry.add_to_hass(hass)
|
||||||
|
|
||||||
|
result = await hass.config_entries.subentries.async_init(
|
||||||
|
(mock_config_entry.entry_id, "conversation"),
|
||||||
|
context={"source": config_entries.SOURCE_USER},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["type"] is FlowResultType.FORM
|
||||||
|
assert result["step_id"] == "init"
|
||||||
|
assert not result["errors"]
|
||||||
|
|
||||||
|
result2 = await hass.config_entries.subentries.async_configure(
|
||||||
|
result["flow_id"],
|
||||||
|
{"name": "My Custom Agent", **RECOMMENDED_OPTIONS},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
assert result2["type"] is FlowResultType.CREATE_ENTRY
|
||||||
|
assert result2["title"] == "My Custom Agent"
|
||||||
|
|
||||||
|
processed_options = RECOMMENDED_OPTIONS.copy()
|
||||||
|
processed_options[CONF_PROMPT] = processed_options[CONF_PROMPT].strip()
|
||||||
|
|
||||||
|
assert result2["data"] == processed_options
|
||||||
|
|
||||||
|
|
||||||
|
async def test_creating_conversation_subentry_not_loaded(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_init_component,
|
||||||
|
mock_config_entry: MockConfigEntry,
|
||||||
|
) -> None:
|
||||||
|
"""Test creating a conversation subentry when entry is not loaded."""
|
||||||
|
await hass.config_entries.async_unload(mock_config_entry.entry_id)
|
||||||
|
with patch(
|
||||||
|
"homeassistant.components.openai_conversation.config_flow.openai.resources.models.AsyncModels.list",
|
||||||
|
return_value=[],
|
||||||
|
):
|
||||||
|
result = await hass.config_entries.subentries.async_init(
|
||||||
|
(mock_config_entry.entry_id, "conversation"),
|
||||||
|
context={"source": config_entries.SOURCE_USER},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["type"] is FlowResultType.ABORT
|
||||||
|
assert result["reason"] == "entry_not_loaded"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_subentry_recommended(
|
||||||
hass: HomeAssistant, mock_config_entry, mock_init_component
|
hass: HomeAssistant, mock_config_entry, mock_init_component
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test the options flow with recommended settings."""
|
"""Test the subentry flow with recommended settings."""
|
||||||
options_flow = await hass.config_entries.options.async_init(
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
mock_config_entry.entry_id
|
subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
|
||||||
|
hass, subentry.subentry_id
|
||||||
)
|
)
|
||||||
options = await hass.config_entries.options.async_configure(
|
options = await hass.config_entries.subentries.async_configure(
|
||||||
options_flow["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
"prompt": "Speak like a pirate",
|
"prompt": "Speak like a pirate",
|
||||||
"recommended": True,
|
"recommended": True,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
assert options["type"] is FlowResultType.CREATE_ENTRY
|
assert options["type"] is FlowResultType.ABORT
|
||||||
assert options["data"]["prompt"] == "Speak like a pirate"
|
assert options["reason"] == "reconfigure_successful"
|
||||||
|
assert subentry.data["prompt"] == "Speak like a pirate"
|
||||||
|
|
||||||
|
|
||||||
async def test_options_unsupported_model(
|
async def test_subentry_unsupported_model(
|
||||||
hass: HomeAssistant, mock_config_entry, mock_init_component
|
hass: HomeAssistant, mock_config_entry, mock_init_component
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test the options form giving error about models not supported."""
|
"""Test the subentry form giving error about models not supported."""
|
||||||
options_flow = await hass.config_entries.options.async_init(
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
mock_config_entry.entry_id
|
subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
|
||||||
|
hass, subentry.subentry_id
|
||||||
)
|
)
|
||||||
assert options_flow["type"] == FlowResultType.FORM
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
assert options_flow["step_id"] == "init"
|
assert subentry_flow["step_id"] == "init"
|
||||||
|
|
||||||
# Configure initial step
|
# Configure initial step
|
||||||
options_flow = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options_flow["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
CONF_RECOMMENDED: False,
|
CONF_RECOMMENDED: False,
|
||||||
CONF_PROMPT: "Speak like a pirate",
|
CONF_PROMPT: "Speak like a pirate",
|
||||||
@ -115,19 +206,19 @@ async def test_options_unsupported_model(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
assert options_flow["type"] == FlowResultType.FORM
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
assert options_flow["step_id"] == "advanced"
|
assert subentry_flow["step_id"] == "advanced"
|
||||||
|
|
||||||
# Configure advanced step
|
# Configure advanced step
|
||||||
options_flow = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options_flow["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
CONF_CHAT_MODEL: "o1-mini",
|
CONF_CHAT_MODEL: "o1-mini",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
assert options_flow["type"] is FlowResultType.FORM
|
assert subentry_flow["type"] is FlowResultType.FORM
|
||||||
assert options_flow["errors"] == {"chat_model": "model_not_supported"}
|
assert subentry_flow["errors"] == {"chat_model": "model_not_supported"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@ -494,7 +585,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
async def test_options_switching(
|
async def test_subentry_switching(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
mock_config_entry,
|
mock_config_entry,
|
||||||
mock_init_component,
|
mock_init_component,
|
||||||
@ -502,16 +593,22 @@ async def test_options_switching(
|
|||||||
new_options,
|
new_options,
|
||||||
expected_options,
|
expected_options,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test the options form."""
|
"""Test the subentry form."""
|
||||||
hass.config_entries.async_update_entry(mock_config_entry, options=current_options)
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
options = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
|
hass.config_entries.async_update_subentry(
|
||||||
assert options["step_id"] == "init"
|
mock_config_entry, subentry, data=current_options
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
|
||||||
|
hass, subentry.subentry_id
|
||||||
|
)
|
||||||
|
assert subentry_flow["step_id"] == "init"
|
||||||
|
|
||||||
for step_options in new_options:
|
for step_options in new_options:
|
||||||
assert options["type"] == FlowResultType.FORM
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
|
|
||||||
# Test that current options are showed as suggested values:
|
# Test that current options are showed as suggested values:
|
||||||
for key in options["data_schema"].schema:
|
for key in subentry_flow["data_schema"].schema:
|
||||||
if (
|
if (
|
||||||
isinstance(key.description, dict)
|
isinstance(key.description, dict)
|
||||||
and "suggested_value" in key.description
|
and "suggested_value" in key.description
|
||||||
@ -523,38 +620,42 @@ async def test_options_switching(
|
|||||||
assert key.description["suggested_value"] == current_option
|
assert key.description["suggested_value"] == current_option
|
||||||
|
|
||||||
# Configure current step
|
# Configure current step
|
||||||
options = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
step_options,
|
step_options,
|
||||||
)
|
)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
assert options["type"] is FlowResultType.CREATE_ENTRY
|
assert subentry_flow["type"] is FlowResultType.ABORT
|
||||||
assert options["data"] == expected_options
|
assert subentry_flow["reason"] == "reconfigure_successful"
|
||||||
|
assert subentry.data == expected_options
|
||||||
|
|
||||||
|
|
||||||
async def test_options_web_search_user_location(
|
async def test_subentry_web_search_user_location(
|
||||||
hass: HomeAssistant, mock_config_entry, mock_init_component
|
hass: HomeAssistant, mock_config_entry, mock_init_component
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test fetching user location."""
|
"""Test fetching user location."""
|
||||||
options = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
assert options["type"] == FlowResultType.FORM
|
subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
|
||||||
assert options["step_id"] == "init"
|
hass, subentry.subentry_id
|
||||||
|
)
|
||||||
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
|
assert subentry_flow["step_id"] == "init"
|
||||||
|
|
||||||
# Configure initial step
|
# Configure initial step
|
||||||
options = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
CONF_RECOMMENDED: False,
|
CONF_RECOMMENDED: False,
|
||||||
CONF_PROMPT: "Speak like a pirate",
|
CONF_PROMPT: "Speak like a pirate",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
assert options["type"] == FlowResultType.FORM
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
assert options["step_id"] == "advanced"
|
assert subentry_flow["step_id"] == "advanced"
|
||||||
|
|
||||||
# Configure advanced step
|
# Configure advanced step
|
||||||
options = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
CONF_TEMPERATURE: 1.0,
|
CONF_TEMPERATURE: 1.0,
|
||||||
CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
|
CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
|
||||||
@ -563,8 +664,8 @@ async def test_options_web_search_user_location(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
assert options["type"] == FlowResultType.FORM
|
assert subentry_flow["type"] == FlowResultType.FORM
|
||||||
assert options["step_id"] == "model"
|
assert subentry_flow["step_id"] == "model"
|
||||||
|
|
||||||
hass.config.country = "US"
|
hass.config.country = "US"
|
||||||
hass.config.time_zone = "America/Los_Angeles"
|
hass.config.time_zone = "America/Los_Angeles"
|
||||||
@ -601,8 +702,8 @@ async def test_options_web_search_user_location(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Configure model step
|
# Configure model step
|
||||||
options = await hass.config_entries.options.async_configure(
|
subentry_flow = await hass.config_entries.subentries.async_configure(
|
||||||
options["flow_id"],
|
subentry_flow["flow_id"],
|
||||||
{
|
{
|
||||||
CONF_WEB_SEARCH: True,
|
CONF_WEB_SEARCH: True,
|
||||||
CONF_WEB_SEARCH_CONTEXT_SIZE: "medium",
|
CONF_WEB_SEARCH_CONTEXT_SIZE: "medium",
|
||||||
@ -614,8 +715,9 @@ async def test_options_web_search_user_location(
|
|||||||
mock_create.call_args.kwargs["input"][0]["content"] == "Where are the following"
|
mock_create.call_args.kwargs["input"][0]["content"] == "Where are the following"
|
||||||
" coordinates located: (37.7749, -122.4194)?"
|
" coordinates located: (37.7749, -122.4194)?"
|
||||||
)
|
)
|
||||||
assert options["type"] is FlowResultType.CREATE_ENTRY
|
assert subentry_flow["type"] is FlowResultType.ABORT
|
||||||
assert options["data"] == {
|
assert subentry_flow["reason"] == "reconfigure_successful"
|
||||||
|
assert subentry.data == {
|
||||||
CONF_RECOMMENDED: False,
|
CONF_RECOMMENDED: False,
|
||||||
CONF_PROMPT: "Speak like a pirate",
|
CONF_PROMPT: "Speak like a pirate",
|
||||||
CONF_TEMPERATURE: 1.0,
|
CONF_TEMPERATURE: 1.0,
|
||||||
|
@ -153,20 +153,18 @@ async def test_entity(
|
|||||||
mock_init_component,
|
mock_init_component,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test entity properties."""
|
"""Test entity properties."""
|
||||||
state = hass.states.get("conversation.openai")
|
state = hass.states.get("conversation.openai_conversation")
|
||||||
assert state
|
assert state
|
||||||
assert state.attributes["supported_features"] == 0
|
assert state.attributes["supported_features"] == 0
|
||||||
|
|
||||||
hass.config_entries.async_update_entry(
|
hass.config_entries.async_update_subentry(
|
||||||
mock_config_entry,
|
mock_config_entry,
|
||||||
options={
|
next(iter(mock_config_entry.subentries.values())),
|
||||||
**mock_config_entry.options,
|
data={CONF_LLM_HASS_API: "assist"},
|
||||||
CONF_LLM_HASS_API: "assist",
|
|
||||||
},
|
|
||||||
)
|
)
|
||||||
await hass.config_entries.async_reload(mock_config_entry.entry_id)
|
await hass.config_entries.async_reload(mock_config_entry.entry_id)
|
||||||
|
|
||||||
state = hass.states.get("conversation.openai")
|
state = hass.states.get("conversation.openai_conversation")
|
||||||
assert state
|
assert state
|
||||||
assert (
|
assert (
|
||||||
state.attributes["supported_features"]
|
state.attributes["supported_features"]
|
||||||
@ -261,7 +259,7 @@ async def test_incomplete_response(
|
|||||||
"Please tell me a big story",
|
"Please tell me a big story",
|
||||||
"mock-conversation-id",
|
"mock-conversation-id",
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
||||||
@ -285,7 +283,7 @@ async def test_incomplete_response(
|
|||||||
"please tell me a big story",
|
"please tell me a big story",
|
||||||
"mock-conversation-id",
|
"mock-conversation-id",
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
||||||
@ -324,7 +322,7 @@ async def test_failed_response(
|
|||||||
"next natural number please",
|
"next natural number please",
|
||||||
"mock-conversation-id",
|
"mock-conversation-id",
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
assert result.response.response_type == intent.IntentResponseType.ERROR, result
|
||||||
@ -583,7 +581,7 @@ async def test_function_call(
|
|||||||
"Please call the test function",
|
"Please call the test function",
|
||||||
mock_chat_log.conversation_id,
|
mock_chat_log.conversation_id,
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert mock_create_stream.call_args.kwargs["input"][2] == {
|
assert mock_create_stream.call_args.kwargs["input"][2] == {
|
||||||
@ -630,7 +628,7 @@ async def test_function_call_without_reasoning(
|
|||||||
"Please call the test function",
|
"Please call the test function",
|
||||||
mock_chat_log.conversation_id,
|
mock_chat_log.conversation_id,
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
|
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||||
@ -686,7 +684,7 @@ async def test_function_call_invalid(
|
|||||||
"Please call the test function",
|
"Please call the test function",
|
||||||
"mock-conversation-id",
|
"mock-conversation-id",
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -720,7 +718,7 @@ async def test_assist_api_tools_conversion(
|
|||||||
]
|
]
|
||||||
|
|
||||||
await conversation.async_converse(
|
await conversation.async_converse(
|
||||||
hass, "hello", None, Context(), agent_id="conversation.openai"
|
hass, "hello", None, Context(), agent_id="conversation.openai_conversation"
|
||||||
)
|
)
|
||||||
|
|
||||||
tools = mock_create_stream.mock_calls[0][2]["tools"]
|
tools = mock_create_stream.mock_calls[0][2]["tools"]
|
||||||
@ -735,10 +733,12 @@ async def test_web_search(
|
|||||||
mock_chat_log: MockChatLog, # noqa: F811
|
mock_chat_log: MockChatLog, # noqa: F811
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test web_search_tool."""
|
"""Test web_search_tool."""
|
||||||
hass.config_entries.async_update_entry(
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
|
hass.config_entries.async_update_subentry(
|
||||||
mock_config_entry,
|
mock_config_entry,
|
||||||
options={
|
subentry,
|
||||||
**mock_config_entry.options,
|
data={
|
||||||
|
**subentry.data,
|
||||||
CONF_WEB_SEARCH: True,
|
CONF_WEB_SEARCH: True,
|
||||||
CONF_WEB_SEARCH_CONTEXT_SIZE: "low",
|
CONF_WEB_SEARCH_CONTEXT_SIZE: "low",
|
||||||
CONF_WEB_SEARCH_USER_LOCATION: True,
|
CONF_WEB_SEARCH_USER_LOCATION: True,
|
||||||
@ -764,7 +764,7 @@ async def test_web_search(
|
|||||||
"What's on the latest news?",
|
"What's on the latest news?",
|
||||||
mock_chat_log.conversation_id,
|
mock_chat_log.conversation_id,
|
||||||
Context(),
|
Context(),
|
||||||
agent_id="conversation.openai",
|
agent_id="conversation.openai_conversation",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert mock_create_stream.mock_calls[0][2]["tools"] == [
|
assert mock_create_stream.mock_calls[0][2]["tools"] == [
|
||||||
|
@ -15,8 +15,10 @@ from openai.types.responses import Response, ResponseOutputMessage, ResponseOutp
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from homeassistant.components.openai_conversation import CONF_FILENAMES
|
from homeassistant.components.openai_conversation import CONF_FILENAMES
|
||||||
|
from homeassistant.components.openai_conversation.const import DOMAIN
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||||
|
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||||
from homeassistant.setup import async_setup_component
|
from homeassistant.setup import async_setup_component
|
||||||
|
|
||||||
from tests.common import MockConfigEntry
|
from tests.common import MockConfigEntry
|
||||||
@ -536,3 +538,271 @@ async def test_generate_content_service_error(
|
|||||||
blocking=True,
|
blocking=True,
|
||||||
return_response=True,
|
return_response=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_migration_from_v1_to_v2(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
device_registry: dr.DeviceRegistry,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
) -> None:
|
||||||
|
"""Test migration from version 1 to version 2."""
|
||||||
|
# Create a v1 config entry with conversation options and an entity
|
||||||
|
OPTIONS = {
|
||||||
|
"recommended": True,
|
||||||
|
"llm_hass_api": ["assist"],
|
||||||
|
"prompt": "You are a helpful assistant",
|
||||||
|
"chat_model": "gpt-4o-mini",
|
||||||
|
}
|
||||||
|
mock_config_entry = MockConfigEntry(
|
||||||
|
domain=DOMAIN,
|
||||||
|
data={"api_key": "1234"},
|
||||||
|
options=OPTIONS,
|
||||||
|
version=1,
|
||||||
|
title="ChatGPT",
|
||||||
|
)
|
||||||
|
mock_config_entry.add_to_hass(hass)
|
||||||
|
|
||||||
|
device = device_registry.async_get_or_create(
|
||||||
|
config_entry_id=mock_config_entry.entry_id,
|
||||||
|
identifiers={(DOMAIN, mock_config_entry.entry_id)},
|
||||||
|
name=mock_config_entry.title,
|
||||||
|
manufacturer="OpenAI",
|
||||||
|
model="ChatGPT",
|
||||||
|
entry_type=dr.DeviceEntryType.SERVICE,
|
||||||
|
)
|
||||||
|
entity = entity_registry.async_get_or_create(
|
||||||
|
"conversation",
|
||||||
|
DOMAIN,
|
||||||
|
mock_config_entry.entry_id,
|
||||||
|
config_entry=mock_config_entry,
|
||||||
|
device_id=device.id,
|
||||||
|
suggested_object_id="google_generative_ai_conversation",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Run migration
|
||||||
|
with patch(
|
||||||
|
"homeassistant.components.openai_conversation.async_setup_entry",
|
||||||
|
return_value=True,
|
||||||
|
):
|
||||||
|
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
assert mock_config_entry.version == 2
|
||||||
|
assert mock_config_entry.data == {"api_key": "1234"}
|
||||||
|
assert mock_config_entry.options == {}
|
||||||
|
|
||||||
|
assert len(mock_config_entry.subentries) == 1
|
||||||
|
|
||||||
|
subentry = next(iter(mock_config_entry.subentries.values()))
|
||||||
|
assert subentry.unique_id is None
|
||||||
|
assert subentry.title == "ChatGPT"
|
||||||
|
assert subentry.subentry_type == "conversation"
|
||||||
|
assert subentry.data == OPTIONS
|
||||||
|
|
||||||
|
migrated_entity = entity_registry.async_get(entity.entity_id)
|
||||||
|
assert migrated_entity is not None
|
||||||
|
assert migrated_entity.config_entry_id == mock_config_entry.entry_id
|
||||||
|
assert migrated_entity.config_subentry_id == subentry.subentry_id
|
||||||
|
assert migrated_entity.unique_id == subentry.subentry_id
|
||||||
|
|
||||||
|
# Check device migration
|
||||||
|
assert not device_registry.async_get_device(
|
||||||
|
identifiers={(DOMAIN, mock_config_entry.entry_id)}
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
migrated_device := device_registry.async_get_device(
|
||||||
|
identifiers={(DOMAIN, subentry.subentry_id)}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
assert migrated_device.identifiers == {(DOMAIN, subentry.subentry_id)}
|
||||||
|
assert migrated_device.id == device.id
|
||||||
|
|
||||||
|
|
||||||
|
async def test_migration_from_v1_to_v2_with_multiple_keys(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
device_registry: dr.DeviceRegistry,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
) -> None:
|
||||||
|
"""Test migration from version 1 to version 2 with different API keys."""
|
||||||
|
# Create two v1 config entries with different API keys
|
||||||
|
options = {
|
||||||
|
"recommended": True,
|
||||||
|
"llm_hass_api": ["assist"],
|
||||||
|
"prompt": "You are a helpful assistant",
|
||||||
|
"chat_model": "gpt-4o-mini",
|
||||||
|
}
|
||||||
|
mock_config_entry = MockConfigEntry(
|
||||||
|
domain=DOMAIN,
|
||||||
|
data={"api_key": "1234"},
|
||||||
|
options=options,
|
||||||
|
version=1,
|
||||||
|
title="ChatGPT 1",
|
||||||
|
)
|
||||||
|
mock_config_entry.add_to_hass(hass)
|
||||||
|
mock_config_entry_2 = MockConfigEntry(
|
||||||
|
domain=DOMAIN,
|
||||||
|
data={"api_key": "12345"},
|
||||||
|
options=options,
|
||||||
|
version=1,
|
||||||
|
title="ChatGPT 2",
|
||||||
|
)
|
||||||
|
mock_config_entry_2.add_to_hass(hass)
|
||||||
|
|
||||||
|
device = device_registry.async_get_or_create(
|
||||||
|
config_entry_id=mock_config_entry.entry_id,
|
||||||
|
identifiers={(DOMAIN, mock_config_entry.entry_id)},
|
||||||
|
name=mock_config_entry.title,
|
||||||
|
manufacturer="OpenAI",
|
||||||
|
model="ChatGPT 1",
|
||||||
|
entry_type=dr.DeviceEntryType.SERVICE,
|
||||||
|
)
|
||||||
|
entity_registry.async_get_or_create(
|
||||||
|
"conversation",
|
||||||
|
DOMAIN,
|
||||||
|
mock_config_entry.entry_id,
|
||||||
|
config_entry=mock_config_entry,
|
||||||
|
device_id=device.id,
|
||||||
|
suggested_object_id="chatgpt_1",
|
||||||
|
)
|
||||||
|
|
||||||
|
device_2 = device_registry.async_get_or_create(
|
||||||
|
config_entry_id=mock_config_entry_2.entry_id,
|
||||||
|
identifiers={(DOMAIN, mock_config_entry_2.entry_id)},
|
||||||
|
name=mock_config_entry_2.title,
|
||||||
|
manufacturer="OpenAI",
|
||||||
|
model="ChatGPT 2",
|
||||||
|
entry_type=dr.DeviceEntryType.SERVICE,
|
||||||
|
)
|
||||||
|
entity_registry.async_get_or_create(
|
||||||
|
"conversation",
|
||||||
|
DOMAIN,
|
||||||
|
mock_config_entry_2.entry_id,
|
||||||
|
config_entry=mock_config_entry_2,
|
||||||
|
device_id=device_2.id,
|
||||||
|
suggested_object_id="chatgpt_2",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Run migration
|
||||||
|
with patch(
|
||||||
|
"homeassistant.components.openai_conversation.async_setup_entry",
|
||||||
|
return_value=True,
|
||||||
|
):
|
||||||
|
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
entries = hass.config_entries.async_entries(DOMAIN)
|
||||||
|
assert len(entries) == 2
|
||||||
|
|
||||||
|
for idx, entry in enumerate(entries):
|
||||||
|
assert entry.version == 2
|
||||||
|
assert not entry.options
|
||||||
|
assert len(entry.subentries) == 1
|
||||||
|
subentry = list(entry.subentries.values())[0]
|
||||||
|
assert subentry.subentry_type == "conversation"
|
||||||
|
assert subentry.data == options
|
||||||
|
assert subentry.title == f"ChatGPT {idx + 1}"
|
||||||
|
|
||||||
|
dev = device_registry.async_get_device(
|
||||||
|
identifiers={(DOMAIN, list(entry.subentries.values())[0].subentry_id)}
|
||||||
|
)
|
||||||
|
assert dev is not None
|
||||||
|
|
||||||
|
|
||||||
|
async def test_migration_from_v1_to_v2_with_same_keys(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test migration from version 1 to version 2 with same API keys consolidates entries."""
    # Both v1 entries share one API key, so migration should merge them
    # into a single v2 entry carrying two conversation subentries.
    options = {
        "recommended": True,
        "llm_hass_api": ["assist"],
        "prompt": "You are a helpful assistant",
        "chat_model": "gpt-4o-mini",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},
        options=options,
        version=1,
        title="ChatGPT",
    )
    mock_config_entry.add_to_hass(hass)
    mock_config_entry_2 = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},  # Same API key
        options=options,
        version=1,
        title="ChatGPT 2",
    )
    mock_config_entry_2.add_to_hass(hass)

    # Register one service device plus one conversation entity per v1 entry,
    # mirroring what the integration created before subentries existed.
    for config_entry, object_id in (
        (mock_config_entry, "chatgpt"),
        (mock_config_entry_2, "chatgpt_2"),
    ):
        migrated_device = device_registry.async_get_or_create(
            config_entry_id=config_entry.entry_id,
            identifiers={(DOMAIN, config_entry.entry_id)},
            name=config_entry.title,
            manufacturer="OpenAI",
            model="ChatGPT",
            entry_type=dr.DeviceEntryType.SERVICE,
        )
        entity_registry.async_get_or_create(
            "conversation",
            DOMAIN,
            config_entry.entry_id,
            config_entry=config_entry,
            device_id=migrated_device.id,
            suggested_object_id=object_id,
        )

    # Run migration
    with patch(
        "homeassistant.components.openai_conversation.async_setup_entry",
        return_value=True,
    ):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()
        await hass.async_block_till_done()

    # Should have only one entry left (consolidated)
    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    entry = entries[0]
    assert entry.version == 2
    assert not entry.options
    assert len(entry.subentries) == 2  # Two subentries from the two original entries

    # Check both subentries exist with correct data
    subentries = list(entry.subentries.values())
    titles = [sub.title for sub in subentries]
    assert "ChatGPT" in titles
    assert "ChatGPT 2" in titles

    for subentry in subentries:
        assert subentry.subentry_type == "conversation"
        assert subentry.data == options

        # Check devices were migrated correctly
        dev = device_registry.async_get_device(
            identifiers={(DOMAIN, subentry.subentry_id)}
        )
        assert dev is not None
Loading…
x
Reference in New Issue
Block a user