Convert Claude to use subentries (#147285)

* Convert Claude to use subentries

* Add latest changes from Google subentries

* Revert accidental change to Google
Paulus Schoutsen authored on 2025-06-24 09:36:09 -04:00; committed by GitHub
parent 602c1c64b3
commit 1cb36f4c18
10 changed files with 696 additions and 162 deletions
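In brief, the commit moves the per-agent conversation options off the Anthropic config entry and into "conversation" subentries, so a single API-key entry can host several conversation agents. A minimal sketch of the before/after entry layout, using plain dicts with illustrative values (the real defaults come from RECOMMENDED_OPTIONS and DEFAULT_CONVERSATION_NAME in the code below); this is an editorial illustration, not part of the commit:

# Sketch of the v1 -> v2 entry layout; values are illustrative, not from the commit.
v1_entry = {
    "version": 1,
    "data": {"api_key": "sk-example"},
    # v1: the conversation options live directly on the entry.
    "options": {"recommended": True, "prompt": "You are a helpful assistant"},
}

v2_entry = {
    "version": 2,
    "data": {"api_key": "sk-example"},
    "options": {},
    # v2: each conversation agent is a subentry; its data holds the former options.
    "subentries": [
        {
            "subentry_type": "conversation",
            "title": "Claude conversation",  # DEFAULT_CONVERSATION_NAME
            "data": {"recommended": True, "prompt": "You are a helpful assistant"},
            "unique_id": None,
        }
    ],
}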

homeassistant/components/anthropic/__init__.py

@@ -6,11 +6,16 @@ from functools import partial

import anthropic

-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers import (
+    config_validation as cv,
+    device_registry as dr,
+    entity_registry as er,
+)
+from homeassistant.helpers.typing import ConfigType

from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
@@ -20,13 +25,24 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up Anthropic."""
+    await async_migrate_integration(hass)
+    return True
+
+
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
    """Set up Anthropic from a config entry."""
    client = await hass.async_add_executor_job(
        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
    )
    try:
-        model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+        # Use model from first conversation subentry for validation
+        subentries = list(entry.subentries.values())
+        if subentries:
+            model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+        else:
+            model_id = RECOMMENDED_CHAT_MODEL
        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
        LOGGER.debug("Anthropic model: %s", model.display_name)
    except anthropic.AuthenticationError as err:
@@ -45,3 +61,68 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload Anthropic."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_migrate_integration(hass: HomeAssistant) -> None:
"""Migrate integration entry structure."""
entries = hass.config_entries.async_entries(DOMAIN)
if not any(entry.version == 1 for entry in entries):
return
api_keys_entries: dict[str, ConfigEntry] = {}
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
for entry in entries:
use_existing = False
subentry = ConfigSubentry(
data=entry.options,
subentry_type="conversation",
title=entry.title,
unique_id=None,
)
if entry.data[CONF_API_KEY] not in api_keys_entries:
use_existing = True
api_keys_entries[entry.data[CONF_API_KEY]] = entry
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
hass.config_entries.async_add_subentry(parent_entry, subentry)
conversation_entity = entity_registry.async_get_entity_id(
"conversation",
DOMAIN,
entry.entry_id,
)
if conversation_entity is not None:
entity_registry.async_update_entity(
conversation_entity,
config_entry_id=parent_entry.entry_id,
config_subentry_id=subentry.subentry_id,
new_unique_id=subentry.subentry_id,
)
device = device_registry.async_get_device(
identifiers={(DOMAIN, entry.entry_id)}
)
if device is not None:
device_registry.async_update_device(
device.id,
new_identifiers={(DOMAIN, subentry.subentry_id)},
add_config_subentry_id=subentry.subentry_id,
add_config_entry_id=parent_entry.entry_id,
)
if parent_entry.entry_id != entry.entry_id:
device_registry.async_update_device(
device.id,
remove_config_entry_id=entry.entry_id,
)
if not use_existing:
await hass.config_entries.async_remove(entry.entry_id)
else:
hass.config_entries.async_update_entry(
entry,
options={},
version=2,
)

homeassistant/components/anthropic/config_flow.py

@@ -5,20 +5,21 @@ from __future__ import annotations

from collections.abc import Mapping
from functools import partial
import logging
-from types import MappingProxyType
-from typing import Any
+from typing import Any, cast

import anthropic
import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
+    ConfigEntryState,
    ConfigFlow,
    ConfigFlowResult,
-    OptionsFlow,
+    ConfigSubentryFlow,
+    SubentryFlowResult,
)
-from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
+from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import llm
from homeassistant.helpers.selector import (
    NumberSelector,
@@ -36,6 +37,7 @@ from .const import (
    CONF_RECOMMENDED,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
+    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
@@ -72,7 +74,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Anthropic."""

-    VERSION = 1
+    VERSION = 2

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -81,6 +83,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
        errors = {}

        if user_input is not None:
+            self._async_abort_entries_match(user_input)
            try:
                await validate_input(self.hass, user_input)
            except anthropic.APITimeoutError:
@@ -102,39 +105,62 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_create_entry(
                title="Claude",
                data=user_input,
-                options=RECOMMENDED_OPTIONS,
+                subentries=[
+                    {
+                        "subentry_type": "conversation",
+                        "data": RECOMMENDED_OPTIONS,
+                        "title": DEFAULT_CONVERSATION_NAME,
+                        "unique_id": None,
+                    }
+                ],
            )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
        )

-    @staticmethod
-    def async_get_options_flow(
-        config_entry: ConfigEntry,
-    ) -> OptionsFlow:
-        """Create the options flow."""
-        return AnthropicOptionsFlow(config_entry)
+    @classmethod
+    @callback
+    def async_get_supported_subentry_types(
+        cls, config_entry: ConfigEntry
+    ) -> dict[str, type[ConfigSubentryFlow]]:
+        """Return subentries supported by this integration."""
+        return {"conversation": ConversationSubentryFlowHandler}


-class AnthropicOptionsFlow(OptionsFlow):
-    """Anthropic config flow options handler."""
+class ConversationSubentryFlowHandler(ConfigSubentryFlow):
+    """Flow for managing conversation subentries."""

-    def __init__(self, config_entry: ConfigEntry) -> None:
-        """Initialize options flow."""
-        self.last_rendered_recommended = config_entry.options.get(
-            CONF_RECOMMENDED, False
-        )
+    last_rendered_recommended = False

-    async def async_step_init(
+    @property
+    def _is_new(self) -> bool:
+        """Return if this is a new subentry."""
+        return self.source == "user"
+
+    async def async_step_set_options(
        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Manage the options."""
-        options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
+    ) -> SubentryFlowResult:
+        """Set conversation options."""
+        # abort if entry is not loaded
+        if self._get_entry().state != ConfigEntryState.LOADED:
+            return self.async_abort(reason="entry_not_loaded")
+
        errors: dict[str, str] = {}

-        if user_input is not None:
-            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
+        if user_input is None:
+            if self._is_new:
+                options = RECOMMENDED_OPTIONS.copy()
+            else:
+                # If this is a reconfiguration, we need to copy the existing options
+                # so that we can show the current values in the form.
+                options = self._get_reconfigure_subentry().data.copy()
+
+            self.last_rendered_recommended = cast(
+                bool, options.get(CONF_RECOMMENDED, False)
+            )
+
+        elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
            if not user_input.get(CONF_LLM_HASS_API):
                user_input.pop(CONF_LLM_HASS_API, None)
            if user_input.get(
@@ -143,7 +169,20 @@ class AnthropicOptionsFlow(OptionsFlow):
                errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

            if not errors:
-                return self.async_create_entry(title="", data=user_input)
+                if self._is_new:
+                    return self.async_create_entry(
+                        title=user_input.pop(CONF_NAME),
+                        data=user_input,
+                    )
+
+                return self.async_update_and_abort(
+                    self._get_entry(),
+                    self._get_reconfigure_subentry(),
+                    data=user_input,
+                )
+
+            options = user_input
+            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
        else:
            # Re-render the options again, now with the recommended options shown/hidden
            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
@@ -163,19 +202,25 @@ class AnthropicOptionsFlow(OptionsFlow):
            suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]

        schema = self.add_suggested_values_to_schema(
-            vol.Schema(anthropic_config_option_schema(self.hass, options)),
+            vol.Schema(
+                anthropic_config_option_schema(self.hass, self._is_new, options)
+            ),
            suggested_values,
        )

        return self.async_show_form(
-            step_id="init",
+            step_id="set_options",
            data_schema=schema,
            errors=errors or None,
        )

+    async_step_user = async_step_set_options
+    async_step_reconfigure = async_step_set_options
+

def anthropic_config_option_schema(
    hass: HomeAssistant,
+    is_new: bool,
    options: Mapping[str, Any],
) -> dict:
    """Return a schema for Anthropic completion options."""
@@ -187,7 +232,15 @@ def anthropic_config_option_schema(
        for api in llm.async_get_apis(hass)
    ]

-    schema = {
+    if is_new:
+        schema: dict[vol.Required | vol.Optional, Any] = {
+            vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
+        }
+    else:
+        schema = {}
+
+    schema.update(
+        {
            vol.Optional(CONF_PROMPT): TemplateSelector(),
            vol.Optional(
                CONF_LLM_HASS_API,
@@ -196,6 +249,7 @@ def anthropic_config_option_schema(
                CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
            ): bool,
        }
+    )

    if options.get(CONF_RECOMMENDED):
        return schema

homeassistant/components/anthropic/const.py

@@ -5,6 +5,8 @@ import logging
DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)

+DEFAULT_CONVERSATION_NAME = "Claude conversation"
+
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"

homeassistant/components/anthropic/conversation.py

@@ -38,7 +38,7 @@ from anthropic.types import (
from voluptuous_openapi import convert

from homeassistant.components import conversation
-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -72,8 +72,14 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up conversation entities."""
-    agent = AnthropicConversationEntity(config_entry)
-    async_add_entities([agent])
+    for subentry in config_entry.subentries.values():
+        if subentry.subentry_type != "conversation":
+            continue
+
+        async_add_entities(
+            [AnthropicConversationEntity(config_entry, subentry)],
+            config_subentry_id=subentry.subentry_id,
+        )


def _format_tool(
@@ -326,21 +332,22 @@
):
    """Anthropic conversation agent."""

-    _attr_has_entity_name = True
-    _attr_name = None
    _attr_supports_streaming = True

-    def __init__(self, entry: AnthropicConfigEntry) -> None:
+    def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
        """Initialize the agent."""
        self.entry = entry
-        self._attr_unique_id = entry.entry_id
+        self.subentry = subentry
+        self._attr_name = subentry.title
+        self._attr_unique_id = subentry.subentry_id
        self._attr_device_info = dr.DeviceInfo(
-            identifiers={(DOMAIN, entry.entry_id)},
+            identifiers={(DOMAIN, subentry.subentry_id)},
+            name=subentry.title,
            manufacturer="Anthropic",
            model="Claude",
            entry_type=dr.DeviceEntryType.SERVICE,
        )
-        if self.entry.options.get(CONF_LLM_HASS_API):
+        if self.subentry.data.get(CONF_LLM_HASS_API):
            self._attr_supported_features = (
                conversation.ConversationEntityFeature.CONTROL
            )
@@ -363,7 +370,7 @@ class AnthropicConversationEntity(
        chat_log: conversation.ChatLog,
    ) -> conversation.ConversationResult:
        """Call the API."""
-        options = self.entry.options
+        options = self.subentry.data

        try:
            await chat_log.async_provide_llm_data(
@@ -393,7 +400,7 @@ class AnthropicConversationEntity(
        chat_log: conversation.ChatLog,
    ) -> None:
        """Generate an answer for the chat log."""
-        options = self.entry.options
+        options = self.subentry.data

        tools: list[ToolParam] | None = None
        if chat_log.llm_api:

homeassistant/components/anthropic/strings.json

@@ -12,12 +12,23 @@
      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
      "authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
    }
  },
-  "options": {
+  "config_subentries": {
+    "conversation": {
+      "initiate_flow": {
+        "user": "Add conversation agent",
+        "reconfigure": "Reconfigure conversation agent"
+      },
+      "entry_type": "Conversation agent",
      "step": {
-      "init": {
+        "set_options": {
          "data": {
+            "name": "[%key:common::config_flow::data::name%]",
            "prompt": "Instructions",
            "chat_model": "[%key:common::generic::model%]",
            "max_tokens": "Maximum tokens to return in response",
@@ -32,8 +43,13 @@
          }
        }
      },
+      "abort": {
+        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
+        "entry_not_loaded": "Cannot add things while the configuration is disabled."
+      },
      "error": {
        "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
      }
    }
+  }
}

tests/components/anthropic/conftest.py

@@ -6,6 +6,7 @@ from unittest.mock import patch
import pytest

from homeassistant.components.anthropic import CONF_CHAT_MODEL
+from homeassistant.components.anthropic.const import DEFAULT_CONVERSATION_NAME
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
from homeassistant.helpers import llm
@@ -23,6 +24,15 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
        data={
            "api_key": "bla",
        },
+        version=2,
+        subentries_data=[
+            {
+                "data": {},
+                "subentry_type": "conversation",
+                "title": DEFAULT_CONVERSATION_NAME,
+                "unique_id": None,
+            }
+        ],
    )
    entry.add_to_hass(hass)
    return entry
@@ -33,8 +43,10 @@ def mock_config_entry_with_assist(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
    """Mock a config entry with assist."""
-    hass.config_entries.async_update_entry(
-        mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST}
+    hass.config_entries.async_update_subentry(
+        mock_config_entry,
+        next(iter(mock_config_entry.subentries.values())),
+        data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
    )
    return mock_config_entry
@@ -44,9 +56,10 @@ def mock_config_entry_with_extended_thinking(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
    """Mock a config entry with assist."""
-    hass.config_entries.async_update_entry(
+    hass.config_entries.async_update_subentry(
        mock_config_entry,
-        options={
+        next(iter(mock_config_entry.subentries.values())),
+        data={
            CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
            CONF_CHAT_MODEL: "claude-3-7-sonnet-latest",
        },

tests/components/anthropic/snapshots/test_conversation.ambr

@@ -16,7 +16,7 @@
        'role': 'user',
      }),
      dict({
-        'agent_id': 'conversation.claude',
+        'agent_id': 'conversation.claude_conversation',
        'content': 'Certainly, calling it now!',
        'role': 'assistant',
        'tool_calls': list([
@@ -30,14 +30,14 @@
        ]),
      }),
      dict({
-        'agent_id': 'conversation.claude',
+        'agent_id': 'conversation.claude_conversation',
        'role': 'tool_result',
        'tool_call_id': 'toolu_0123456789AbCdEfGhIjKlM',
        'tool_name': 'test_tool',
        'tool_result': 'Test response',
      }),
      dict({
-        'agent_id': 'conversation.claude',
+        'agent_id': 'conversation.claude_conversation',
        'content': 'I have successfully called the function',
        'role': 'assistant',
        'tool_calls': None,

tests/components/anthropic/test_config_flow.py

@@ -1,6 +1,6 @@
"""Test the Anthropic config flow."""

-from unittest.mock import AsyncMock, patch
+from unittest.mock import AsyncMock, Mock, patch

from anthropic import (
    APIConnectionError,
@@ -22,12 +22,13 @@ from homeassistant.components.anthropic.const import (
    CONF_RECOMMENDED,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
+    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_THINKING_BUDGET,
)
-from homeassistant.const import CONF_LLM_HASS_API
+from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
@@ -71,39 +72,103 @@ async def test_form(hass: HomeAssistant) -> None:
    assert result2["data"] == {
        "api_key": "bla",
    }
-    assert result2["options"] == RECOMMENDED_OPTIONS
+    assert result2["options"] == {}
+    assert result2["subentries"] == [
+        {
+            "subentry_type": "conversation",
+            "data": RECOMMENDED_OPTIONS,
+            "title": DEFAULT_CONVERSATION_NAME,
+            "unique_id": None,
+        }
+    ]
    assert len(mock_setup_entry.mock_calls) == 1


-async def test_options(
-    hass: HomeAssistant, mock_config_entry, mock_init_component
-) -> None:
-    """Test the options form."""
-    options_flow = await hass.config_entries.options.async_init(
-        mock_config_entry.entry_id
-    )
-    options = await hass.config_entries.options.async_configure(
-        options_flow["flow_id"],
-        {
-            "prompt": "Speak like a pirate",
-            "max_tokens": 200,
-        },
-    )
-    await hass.async_block_till_done()
-    assert options["type"] is FlowResultType.CREATE_ENTRY
-    assert options["data"]["prompt"] == "Speak like a pirate"
-    assert options["data"]["max_tokens"] == 200
-    assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL
+async def test_duplicate_entry(hass: HomeAssistant) -> None:
+    """Test we abort on duplicate config entry."""
+    MockConfigEntry(
+        domain=DOMAIN,
+        data={CONF_API_KEY: "bla"},
+    ).add_to_hass(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+    assert result["type"] is FlowResultType.FORM
+    assert not result["errors"]
+
+    with patch(
+        "anthropic.resources.models.AsyncModels.retrieve",
+        return_value=Mock(display_name="Claude 3.5 Sonnet"),
+    ):
+        result = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            {
+                CONF_API_KEY: "bla",
+            },
+        )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
+
+
+async def test_creating_conversation_subentry(
+    hass: HomeAssistant, mock_config_entry, mock_init_component
+) -> None:
+    """Test creating a conversation subentry."""
+    result = await hass.config_entries.subentries.async_init(
+        (mock_config_entry.entry_id, "conversation"),
+        context={"source": config_entries.SOURCE_USER},
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "set_options"
+    assert not result["errors"]
+
+    result2 = await hass.config_entries.subentries.async_configure(
+        result["flow_id"],
+        {CONF_NAME: "Mock name", **RECOMMENDED_OPTIONS},
+    )
+    await hass.async_block_till_done()
+
+    assert result2["type"] is FlowResultType.CREATE_ENTRY
+    assert result2["title"] == "Mock name"
+    processed_options = RECOMMENDED_OPTIONS.copy()
+    processed_options[CONF_PROMPT] = processed_options[CONF_PROMPT].strip()
+    assert result2["data"] == processed_options
+
+
+async def test_creating_conversation_subentry_not_loaded(
+    hass: HomeAssistant,
+    mock_init_component,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test creating a conversation subentry when entry is not loaded."""
+    await hass.config_entries.async_unload(mock_config_entry.entry_id)
+    with patch(
+        "anthropic.resources.models.AsyncModels.list",
+        return_value=[],
+    ):
+        result = await hass.config_entries.subentries.async_init(
+            (mock_config_entry.entry_id, "conversation"),
+            context={"source": config_entries.SOURCE_USER},
+        )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "entry_not_loaded"


-async def test_options_thinking_budget_more_than_max(
+async def test_subentry_options_thinking_budget_more_than_max(
    hass: HomeAssistant, mock_config_entry, mock_init_component
) -> None:
    """Test error about thinking budget being more than max tokens."""
-    options_flow = await hass.config_entries.options.async_init(
-        mock_config_entry.entry_id
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    options_flow = await mock_config_entry.start_subentry_reconfigure_flow(
+        hass, subentry.subentry_id
    )
-    options = await hass.config_entries.options.async_configure(
+    options = await hass.config_entries.subentries.async_configure(
        options_flow["flow_id"],
        {
            "prompt": "Speak like a pirate",
@@ -111,6 +176,7 @@ async def test_options_thinking_budget_more_than_max(
            "chat_model": "claude-3-7-sonnet-latest",
            "temperature": 1,
            "thinking_budget": 16384,
+            "recommended": False,
        },
    )
    await hass.async_block_till_done()
@@ -252,7 +318,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
        ),
    ],
)
-async def test_options_switching(
+async def test_subentry_options_switching(
    hass: HomeAssistant,
    mock_config_entry,
    mock_init_component,
@@ -260,23 +326,29 @@ async def test_options_switching(
    new_options,
    expected_options,
) -> None:
-    """Test the options form."""
-    hass.config_entries.async_update_entry(mock_config_entry, options=current_options)
-    options_flow = await hass.config_entries.options.async_init(
-        mock_config_entry.entry_id
+    """Test the subentry options form."""
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
+        mock_config_entry, subentry, data=current_options
    )
+    await hass.async_block_till_done()
+    options_flow = await mock_config_entry.start_subentry_reconfigure_flow(
+        hass, subentry.subentry_id
+    )
    if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED):
-        options_flow = await hass.config_entries.options.async_configure(
+        options_flow = await hass.config_entries.subentries.async_configure(
            options_flow["flow_id"],
            {
                **current_options,
                CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
            },
        )
-    options = await hass.config_entries.options.async_configure(
+    options = await hass.config_entries.subentries.async_configure(
        options_flow["flow_id"],
        new_options,
    )
    await hass.async_block_till_done()
-    assert options["type"] is FlowResultType.CREATE_ENTRY
-    assert options["data"] == expected_options
+    assert options["type"] is FlowResultType.ABORT
+    assert options["reason"] == "reconfigure_successful"
+    assert subentry.data == expected_options

tests/components/anthropic/test_conversation.py

@@ -180,21 +180,23 @@
    mock_init_component,
) -> None:
    """Test entity properties."""
-    state = hass.states.get("conversation.claude")
+    state = hass.states.get("conversation.claude_conversation")
    assert state
    assert state.attributes["supported_features"] == 0

-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
        mock_config_entry,
-        options={
-            **mock_config_entry.options,
+        subentry,
+        data={
+            **subentry.data,
            CONF_LLM_HASS_API: "assist",
        },
    )
    with patch("anthropic.resources.models.AsyncModels.retrieve"):
        await hass.config_entries.async_reload(mock_config_entry.entry_id)

-    state = hass.states.get("conversation.claude")
+    state = hass.states.get("conversation.claude_conversation")
    assert state
    assert (
        state.attributes["supported_features"]
@ -218,7 +220,7 @@ async def test_error_handling(
), ),
): ):
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", None, Context(), agent_id="conversation.claude" hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
) )
assert result.response.response_type == intent.IntentResponseType.ERROR assert result.response.response_type == intent.IntentResponseType.ERROR
@@ -229,9 +231,11 @@
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Test that template error handling works."""
-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
        mock_config_entry,
-        options={
+        subentry,
+        data={
            "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.",
        },
    )
@ -244,7 +248,7 @@ async def test_template_error(
await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done() await hass.async_block_till_done()
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", None, Context(), agent_id="conversation.claude" hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
) )
assert result.response.response_type == intent.IntentResponseType.ERROR assert result.response.response_type == intent.IntentResponseType.ERROR
@@ -260,9 +264,11 @@ async def test_template_variables(
    mock_user.id = "12345"
    mock_user.name = "Test User"

-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
        mock_config_entry,
-        options={
+        subentry,
+        data={
            "prompt": (
                "The user name is {{ user_name }}. "
                "The user id is {{ llm_context.context.user_id }}."
@ -286,7 +292,7 @@ async def test_template_variables(
await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done() await hass.async_block_till_done()
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", None, context, agent_id="conversation.claude" hass, "hello", None, context, agent_id="conversation.claude_conversation"
) )
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
@ -304,7 +310,9 @@ async def test_conversation_agent(
mock_init_component, mock_init_component,
) -> None: ) -> None:
"""Test Anthropic Agent.""" """Test Anthropic Agent."""
agent = conversation.agent_manager.async_get_agent(hass, "conversation.claude") agent = conversation.agent_manager.async_get_agent(
hass, "conversation.claude_conversation"
)
assert agent.supported_languages == "*" assert agent.supported_languages == "*"
@ -332,7 +340,7 @@ async def test_function_call(
expected_call_tool_args: dict[str, Any], expected_call_tool_args: dict[str, Any],
) -> None: ) -> None:
"""Test function call from the assistant.""" """Test function call from the assistant."""
agent_id = "conversation.claude" agent_id = "conversation.claude_conversation"
context = Context() context = Context()
mock_tool = AsyncMock() mock_tool = AsyncMock()
@ -430,7 +438,7 @@ async def test_function_exception(
mock_init_component, mock_init_component,
) -> None: ) -> None:
"""Test function call with exception.""" """Test function call with exception."""
agent_id = "conversation.claude" agent_id = "conversation.claude_conversation"
context = Context() context = Context()
mock_tool = AsyncMock() mock_tool = AsyncMock()
@ -536,7 +544,7 @@ async def test_assist_api_tools_conversion(
): ):
assert await async_setup_component(hass, component, {}) assert await async_setup_component(hass, component, {})
agent_id = "conversation.claude" agent_id = "conversation.claude_conversation"
with patch( with patch(
"anthropic.resources.messages.AsyncMessages.create", "anthropic.resources.messages.AsyncMessages.create",
new_callable=AsyncMock, new_callable=AsyncMock,
@ -561,17 +569,19 @@ async def test_unknown_hass_api(
mock_init_component, mock_init_component,
) -> None: ) -> None:
"""Test when we reference an API that no longer exists.""" """Test when we reference an API that no longer exists."""
hass.config_entries.async_update_entry( subentry = next(iter(mock_config_entry.subentries.values()))
hass.config_entries.async_update_subentry(
mock_config_entry, mock_config_entry,
options={ subentry,
**mock_config_entry.options, data={
**subentry.data,
CONF_LLM_HASS_API: "non-existing", CONF_LLM_HASS_API: "non-existing",
}, },
) )
await hass.async_block_till_done() await hass.async_block_till_done()
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", "1234", Context(), agent_id="conversation.claude" hass, "hello", "1234", Context(), agent_id="conversation.claude_conversation"
) )
assert result == snapshot assert result == snapshot
@ -597,17 +607,25 @@ async def test_conversation_id(
side_effect=create_stream_generator, side_effect=create_stream_generator,
): ):
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", "1234", Context(), agent_id="conversation.claude" hass,
"hello",
"1234",
Context(),
agent_id="conversation.claude_conversation",
) )
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", None, None, agent_id="conversation.claude" hass, "hello", None, None, agent_id="conversation.claude_conversation"
) )
conversation_id = result.conversation_id conversation_id = result.conversation_id
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", conversation_id, None, agent_id="conversation.claude" hass,
"hello",
conversation_id,
None,
agent_id="conversation.claude_conversation",
) )
assert result.conversation_id == conversation_id assert result.conversation_id == conversation_id
@ -615,13 +633,13 @@ async def test_conversation_id(
unknown_id = ulid_util.ulid() unknown_id = ulid_util.ulid()
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", unknown_id, None, agent_id="conversation.claude" hass, "hello", unknown_id, None, agent_id="conversation.claude_conversation"
) )
assert result.conversation_id != unknown_id assert result.conversation_id != unknown_id
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", "koala", None, agent_id="conversation.claude" hass, "hello", "koala", None, agent_id="conversation.claude_conversation"
) )
assert result.conversation_id == "koala" assert result.conversation_id == "koala"
@ -654,7 +672,7 @@ async def test_refusal(
"2631EDCF22E8CCC1FB35B501C9C86", "2631EDCF22E8CCC1FB35B501C9C86",
None, None,
Context(), Context(),
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
) )
assert result.response.response_type == intent.IntentResponseType.ERROR assert result.response.response_type == intent.IntentResponseType.ERROR
@ -695,7 +713,7 @@ async def test_extended_thinking(
), ),
): ):
result = await conversation.async_converse( result = await conversation.async_converse(
hass, "hello", None, Context(), agent_id="conversation.claude" hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
) )
chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get(
@ -732,7 +750,7 @@ async def test_redacted_thinking(
"8432ECCCE4C1253D5E2D82641AC0E52CC2876CB", "8432ECCCE4C1253D5E2D82641AC0E52CC2876CB",
None, None,
Context(), Context(),
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
) )
chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get(
@ -751,7 +769,7 @@ async def test_extended_thinking_tool_call(
snapshot: SnapshotAssertion, snapshot: SnapshotAssertion,
) -> None: ) -> None:
"""Test that thinking blocks and their order are preserved in with tool calls.""" """Test that thinking blocks and their order are preserved in with tool calls."""
agent_id = "conversation.claude" agent_id = "conversation.claude_conversation"
context = Context() context = Context()
mock_tool = AsyncMock() mock_tool = AsyncMock()
@ -841,7 +859,8 @@ async def test_extended_thinking_tool_call(
conversation.chat_log.SystemContent("You are a helpful assistant."), conversation.chat_log.SystemContent("You are a helpful assistant."),
conversation.chat_log.UserContent("What shape is a donut?"), conversation.chat_log.UserContent("What shape is a donut?"),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="A donut is a torus." agent_id="conversation.claude_conversation",
content="A donut is a torus.",
), ),
], ],
[ [
@ -849,10 +868,11 @@ async def test_extended_thinking_tool_call(
conversation.chat_log.UserContent("What shape is a donut?"), conversation.chat_log.UserContent("What shape is a donut?"),
conversation.chat_log.UserContent("Can you tell me?"), conversation.chat_log.UserContent("Can you tell me?"),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="A donut is a torus." agent_id="conversation.claude_conversation",
content="A donut is a torus.",
), ),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="Hope this helps." agent_id="conversation.claude_conversation", content="Hope this helps."
), ),
], ],
[ [
@ -861,20 +881,21 @@ async def test_extended_thinking_tool_call(
conversation.chat_log.UserContent("Can you tell me?"), conversation.chat_log.UserContent("Can you tell me?"),
conversation.chat_log.UserContent("Please?"), conversation.chat_log.UserContent("Please?"),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="A donut is a torus." agent_id="conversation.claude_conversation",
content="A donut is a torus.",
), ),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="Hope this helps." agent_id="conversation.claude_conversation", content="Hope this helps."
), ),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", content="You are welcome." agent_id="conversation.claude_conversation", content="You are welcome."
), ),
], ],
[ [
conversation.chat_log.SystemContent("You are a helpful assistant."), conversation.chat_log.SystemContent("You are a helpful assistant."),
conversation.chat_log.UserContent("Turn off the lights and make me coffee"), conversation.chat_log.UserContent("Turn off the lights and make me coffee"),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
content="Sure.", content="Sure.",
tool_calls=[ tool_calls=[
llm.ToolInput( llm.ToolInput(
@ -891,19 +912,19 @@ async def test_extended_thinking_tool_call(
), ),
conversation.chat_log.UserContent("Thank you"), conversation.chat_log.UserContent("Thank you"),
conversation.chat_log.ToolResultContent( conversation.chat_log.ToolResultContent(
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
tool_call_id="mock-tool-call-id", tool_call_id="mock-tool-call-id",
tool_name="HassTurnOff", tool_name="HassTurnOff",
tool_result={"success": True, "response": "Lights are off."}, tool_result={"success": True, "response": "Lights are off."},
), ),
conversation.chat_log.ToolResultContent( conversation.chat_log.ToolResultContent(
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
tool_call_id="mock-tool-call-id-2", tool_call_id="mock-tool-call-id-2",
tool_name="MakeCoffee", tool_name="MakeCoffee",
tool_result={"success": False, "response": "Not enough milk."}, tool_result={"success": False, "response": "Not enough milk."},
), ),
conversation.chat_log.AssistantContent( conversation.chat_log.AssistantContent(
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
content="Should I add milk to the shopping list?", content="Should I add milk to the shopping list?",
), ),
], ],
@ -940,7 +961,7 @@ async def test_history_conversion(
"Are you sure?", "Are you sure?",
conversation_id, conversation_id,
Context(), Context(),
agent_id="conversation.claude", agent_id="conversation.claude_conversation",
) )
assert mock_create.mock_calls[0][2]["messages"] == snapshot assert mock_create.mock_calls[0][2]["messages"] == snapshot

tests/components/anthropic/test_init.py

@@ -11,7 +11,9 @@ from anthropic import (
from httpx import URL, Request, Response
import pytest

+from homeassistant.components.anthropic.const import DOMAIN
from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry
@@ -61,3 +63,269 @@
        assert await async_setup_component(hass, "anthropic", {})
        await hass.async_block_till_done()
    assert error in caplog.text
async def test_migration_from_v1_to_v2(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migration from version 1 to version 2."""
# Create a v1 config entry with conversation options and an entity
OPTIONS = {
"recommended": True,
"llm_hass_api": ["assist"],
"prompt": "You are a helpful assistant",
"chat_model": "claude-3-haiku-20240307",
}
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
data={"api_key": "1234"},
options=OPTIONS,
version=1,
title="Claude",
)
mock_config_entry.add_to_hass(hass)
device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Anthropic",
model="Claude",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity = entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry.entry_id,
config_entry=mock_config_entry,
device_id=device.id,
suggested_object_id="claude",
)
# Run migration
with patch(
"homeassistant.components.anthropic.async_setup_entry",
return_value=True,
):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert mock_config_entry.version == 2
assert mock_config_entry.data == {"api_key": "1234"}
assert mock_config_entry.options == {}
assert len(mock_config_entry.subentries) == 1
subentry = next(iter(mock_config_entry.subentries.values()))
assert subentry.unique_id is None
assert subentry.title == "Claude"
assert subentry.subentry_type == "conversation"
assert subentry.data == OPTIONS
migrated_entity = entity_registry.async_get(entity.entity_id)
assert migrated_entity is not None
assert migrated_entity.config_entry_id == mock_config_entry.entry_id
assert migrated_entity.config_subentry_id == subentry.subentry_id
assert migrated_entity.unique_id == subentry.subentry_id
# Check device migration
assert not device_registry.async_get_device(
identifiers={(DOMAIN, mock_config_entry.entry_id)}
)
assert (
migrated_device := device_registry.async_get_device(
identifiers={(DOMAIN, subentry.subentry_id)}
)
)
assert migrated_device.identifiers == {(DOMAIN, subentry.subentry_id)}
assert migrated_device.id == device.id
async def test_migration_from_v1_to_v2_with_multiple_keys(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migration from version 1 to version 2 with different API keys."""
# Create two v1 config entries with different API keys
options = {
"recommended": True,
"llm_hass_api": ["assist"],
"prompt": "You are a helpful assistant",
"chat_model": "claude-3-haiku-20240307",
}
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
data={"api_key": "1234"},
options=options,
version=1,
title="Claude 1",
)
mock_config_entry.add_to_hass(hass)
mock_config_entry_2 = MockConfigEntry(
domain=DOMAIN,
data={"api_key": "12345"},
options=options,
version=1,
title="Claude 2",
)
mock_config_entry_2.add_to_hass(hass)
device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Anthropic",
model="Claude 1",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry.entry_id,
config_entry=mock_config_entry,
device_id=device.id,
suggested_object_id="claude_1",
)
device_2 = device_registry.async_get_or_create(
config_entry_id=mock_config_entry_2.entry_id,
identifiers={(DOMAIN, mock_config_entry_2.entry_id)},
name=mock_config_entry_2.title,
manufacturer="Anthropic",
model="Claude 2",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry_2.entry_id,
config_entry=mock_config_entry_2,
device_id=device_2.id,
suggested_object_id="claude_2",
)
# Run migration
with patch(
"homeassistant.components.anthropic.async_setup_entry",
return_value=True,
):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 2
for idx, entry in enumerate(entries):
assert entry.version == 2
assert not entry.options
assert len(entry.subentries) == 1
subentry = list(entry.subentries.values())[0]
assert subentry.subentry_type == "conversation"
assert subentry.data == options
assert subentry.title == f"Claude {idx + 1}"
dev = device_registry.async_get_device(
identifiers={(DOMAIN, list(entry.subentries.values())[0].subentry_id)}
)
assert dev is not None
async def test_migration_from_v1_to_v2_with_same_keys(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migration from version 1 to version 2 with same API keys consolidates entries."""
# Create two v1 config entries with the same API key
options = {
"recommended": True,
"llm_hass_api": ["assist"],
"prompt": "You are a helpful assistant",
"chat_model": "claude-3-haiku-20240307",
}
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
data={"api_key": "1234"},
options=options,
version=1,
title="Claude",
)
mock_config_entry.add_to_hass(hass)
mock_config_entry_2 = MockConfigEntry(
domain=DOMAIN,
data={"api_key": "1234"}, # Same API key
options=options,
version=1,
title="Claude 2",
)
mock_config_entry_2.add_to_hass(hass)
device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Anthropic",
model="Claude",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry.entry_id,
config_entry=mock_config_entry,
device_id=device.id,
suggested_object_id="claude",
)
device_2 = device_registry.async_get_or_create(
config_entry_id=mock_config_entry_2.entry_id,
identifiers={(DOMAIN, mock_config_entry_2.entry_id)},
name=mock_config_entry_2.title,
manufacturer="Anthropic",
model="Claude",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry_2.entry_id,
config_entry=mock_config_entry_2,
device_id=device_2.id,
suggested_object_id="claude_2",
)
# Run migration
with patch(
"homeassistant.components.anthropic.async_setup_entry",
return_value=True,
):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
# Should have only one entry left (consolidated)
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
entry = entries[0]
assert entry.version == 2
assert not entry.options
assert len(entry.subentries) == 2 # Two subentries from the two original entries
# Check both subentries exist with correct data
subentries = list(entry.subentries.values())
titles = [sub.title for sub in subentries]
assert "Claude" in titles
assert "Claude 2" in titles
for subentry in subentries:
assert subentry.subentry_type == "conversation"
assert subentry.data == options
# Check devices were migrated correctly
dev = device_registry.async_get_device(
identifiers={(DOMAIN, subentry.subentry_id)}
)
assert dev is not None