Allow selection of multiple LLM APIs in Anthropic (#143190)

Allen Porter 2025-04-19 03:34:51 -07:00 committed by GitHub
parent 879cdcc0a4
commit 3da77726d0
2 changed files with 32 additions and 18 deletions
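The commit turns the CONF_LLM_HASS_API option from a single string (with a "none" sentinel meaning "No control") into a list chosen through a multi-select selector. Options entries saved before this change still hold a bare string, so the options flow now normalizes such a value to a one-element list before rendering suggested values. Below is a minimal, standalone sketch of that normalization; the constant value and the helper name are illustrative stand-ins, not code from the integration:

# Illustrative sketch: the key value and helper name are assumptions, but the
# normalization mirrors the isinstance check this commit adds to the options flow.
from typing import Any

CONF_LLM_HASS_API = "llm_hass_api"  # stand-in for the integration's constant


def normalize_llm_apis(options: dict[str, Any]) -> dict[str, Any]:
    """Return a copy of stored options with the LLM API value as a list."""
    suggested = dict(options)
    if (apis := suggested.get(CONF_LLM_HASS_API)) and isinstance(apis, str):
        suggested[CONF_LLM_HASS_API] = [apis]
    return suggested


# An entry saved before this change stored a bare string:
assert normalize_llm_apis({CONF_LLM_HASS_API: "assist"}) == {CONF_LLM_HASS_API: ["assist"]}
# Entries written after the change already hold a list and pass through unchanged:
assert normalize_llm_apis({CONF_LLM_HASS_API: ["assist"]}) == {CONF_LLM_HASS_API: ["assist"]}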

homeassistant/components/anthropic/config_flow.py

@@ -52,7 +52,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
-    CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
+    CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
     CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }
@@ -134,9 +134,8 @@ class AnthropicOptionsFlow(OptionsFlow):
         if user_input is not None:
             if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
-                if user_input[CONF_LLM_HASS_API] == "none":
-                    user_input.pop(CONF_LLM_HASS_API)
+                if not user_input.get(CONF_LLM_HASS_API):
+                    user_input.pop(CONF_LLM_HASS_API, None)
                 if user_input.get(
                     CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
                 ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
@@ -151,12 +150,16 @@
             options = {
                 CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                 CONF_PROMPT: user_input[CONF_PROMPT],
-                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
+                CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
             }
         suggested_values = options.copy()
         if not suggested_values.get(CONF_PROMPT):
             suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
+        if (
+            suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
+        ) and isinstance(suggested_llm_apis, str):
+            suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
         schema = self.add_suggested_values_to_schema(
             vol.Schema(anthropic_config_option_schema(self.hass, options)),
@@ -176,24 +179,18 @@ def anthropic_config_option_schema(
 ) -> dict:
     """Return a schema for Anthropic completion options."""
     hass_apis: list[SelectOptionDict] = [
-        SelectOptionDict(
-            label="No control",
-            value="none",
-        )
-    ]
-    hass_apis.extend(
         SelectOptionDict(
             label=api.name,
            value=api.id,
         )
         for api in llm.async_get_apis(hass)
-    )
+    ]
     schema = {
         vol.Optional(CONF_PROMPT): TemplateSelector(),
-        vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
-            SelectSelectorConfig(options=hass_apis)
-        ),
+        vol.Optional(
+            CONF_LLM_HASS_API,
+        ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
         vol.Required(
             CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
         ): bool,
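With multiple=True on the selector above, an empty selection now stands for "No control": the dedicated "No control"/"none" option is removed, and the options flow simply drops CONF_LLM_HASS_API from the saved options when nothing is selected.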

tests/components/anthropic/test_config_flow.py

@@ -196,13 +196,13 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None:
         (
             {
                 CONF_RECOMMENDED: True,
-                CONF_LLM_HASS_API: "none",
                 CONF_PROMPT: "bla",
             },
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
                 CONF_TEMPERATURE: 0.3,
+                CONF_LLM_HASS_API: [],
             },
             {
                 CONF_RECOMMENDED: False,
@@ -224,15 +224,32 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None:
             },
             {
                 CONF_RECOMMENDED: True,
-                CONF_LLM_HASS_API: "assist",
+                CONF_LLM_HASS_API: ["assist"],
                 CONF_PROMPT: "",
             },
             {
                 CONF_RECOMMENDED: True,
-                CONF_LLM_HASS_API: "assist",
+                CONF_LLM_HASS_API: ["assist"],
                 CONF_PROMPT: "",
             },
         ),
+        (
+            {
+                CONF_RECOMMENDED: True,
+                CONF_PROMPT: "",
+                CONF_LLM_HASS_API: "assist",
+            },
+            {
+                CONF_RECOMMENDED: True,
+                CONF_PROMPT: "",
+                CONF_LLM_HASS_API: ["assist"],
+            },
+            {
+                CONF_RECOMMENDED: True,
+                CONF_PROMPT: "",
+                CONF_LLM_HASS_API: ["assist"],
+            },
+        ),
     ],
 )
 async def test_options_switching(