Add OpenRouter integration (#143098)

Joost Lekkerkerker, 2025-07-16 10:55:28 +02:00 (committed by GitHub)
commit a57d48fd31, parent 8a73511b02
19 changed files with 836 additions and 0 deletions

.strict-typing

@@ -377,6 +377,7 @@ homeassistant.components.onedrive.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.open_router.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*

CODEOWNERS (generated)

@@ -1102,6 +1102,8 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/openai_conversation/ @balloob
/tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq

homeassistant/components/open_router/__init__.py

@@ -0,0 +1,58 @@
"""The OpenRouter integration."""

from __future__ import annotations

from openai import AsyncOpenAI, AuthenticationError, OpenAIError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.httpx_client import get_async_client

from .const import LOGGER

PLATFORMS = [Platform.CONVERSATION]

type OpenRouterConfigEntry = ConfigEntry[AsyncOpenAI]


async def async_setup_entry(hass: HomeAssistant, entry: OpenRouterConfigEntry) -> bool:
    """Set up OpenRouter from a config entry."""
    client = AsyncOpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=entry.data[CONF_API_KEY],
        http_client=get_async_client(hass),
    )

    # Cache current platform data which gets added to each request (caching done by library)
    _ = await hass.async_add_executor_job(client.platform_headers)

    try:
        async for _ in client.with_options(timeout=10.0).models.list():
            break
    except AuthenticationError as err:
        LOGGER.error("Invalid API key: %s", err)
        raise ConfigEntryError("Invalid API key") from err
    except OpenAIError as err:
        raise ConfigEntryNotReady(err) from err

    entry.runtime_data = client

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    entry.async_on_unload(entry.add_update_listener(_async_update_listener))

    return True


async def _async_update_listener(
    hass: HomeAssistant, entry: OpenRouterConfigEntry
) -> None:
    """Handle update."""
    await hass.config_entries.async_reload(entry.entry_id)


async def async_unload_entry(hass: HomeAssistant, entry: OpenRouterConfigEntry) -> bool:
    """Unload OpenRouter."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
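The setup above is just the OpenAI Python client pointed at OpenRouter's OpenAI-compatible endpoint. A minimal standalone sketch of that pattern, useful for manual verification outside Home Assistant; the API key and model id are placeholders, not values from this change:

import asyncio

from openai import AsyncOpenAI


async def main() -> None:
    # Same construction as async_setup_entry above; the key is a placeholder.
    client = AsyncOpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key="sk-or-...",
    )
    # The connectivity check used during setup: fetch at most one model.
    async for model in client.with_options(timeout=10.0).models.list():
        print(model.id)
        break
    # The kind of call the conversation entity makes per user message.
    result = await client.chat.completions.create(
        model="openai/gpt-3.5-turbo",  # model id is illustrative
        messages=[{"role": "user", "content": "hello"}],
    )
    print(result.choices[0].message.content)


asyncio.run(main())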

homeassistant/components/open_router/config_flow.py

@@ -0,0 +1,118 @@
"""Config flow for OpenRouter integration."""

from __future__ import annotations

import logging
from typing import Any

from openai import AsyncOpenAI
from python_open_router import OpenRouterClient, OpenRouterError
import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    ConfigSubentryFlow,
    SubentryFlowResult,
)
from homeassistant.const import CONF_API_KEY, CONF_MODEL
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class OpenRouterConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for OpenRouter."""

    VERSION = 1

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this handler."""
        return {"conversation": ConversationFlowHandler}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors = {}
        if user_input is not None:
            self._async_abort_entries_match(user_input)
            client = OpenRouterClient(
                user_input[CONF_API_KEY], async_get_clientsession(self.hass)
            )
            try:
                await client.get_key_data()
            except OpenRouterError:
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title="OpenRouter",
                    data=user_input,
                )
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_API_KEY): str,
                }
            ),
            errors=errors,
        )


class ConversationFlowHandler(ConfigSubentryFlow):
    """Handle subentry flow."""

    def __init__(self) -> None:
        """Initialize the subentry flow."""
        self.options: dict[str, str] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """User flow to create a conversation agent subentry."""
        if user_input is not None:
            return self.async_create_entry(
                title=self.options[user_input[CONF_MODEL]], data=user_input
            )
        entry = self._get_entry()
        client = AsyncOpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=entry.data[CONF_API_KEY],
            http_client=get_async_client(self.hass),
        )
        options = []
        async for model in client.with_options(timeout=10.0).models.list():
            options.append(SelectOptionDict(value=model.id, label=model.name))  # type: ignore[attr-defined]
            self.options[model.id] = model.name  # type: ignore[attr-defined]
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_MODEL): SelectSelector(
                        SelectSelectorConfig(
                            options=options, mode=SelectSelectorMode.DROPDOWN, sort=True
                        ),
                    ),
                }
            ),
        )
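Once an entry exists, conversation agents are added through the subentry flow above. A minimal sketch of driving that flow programmatically, assuming an existing OpenRouter config entry and an illustrative model id (the tests later in this change exercise the same path):

from homeassistant.config_entries import SOURCE_USER, ConfigEntry
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType


async def add_conversation_agent(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Sketch: create a conversation subentry on an existing OpenRouter entry."""
    result = await hass.config_entries.subentries.async_init(
        (entry.entry_id, "conversation"),
        context={"source": SOURCE_USER},
    )
    result = await hass.config_entries.subentries.async_configure(
        result["flow_id"],
        {CONF_MODEL: "openai/gpt-3.5-turbo"},  # model id is illustrative
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY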

homeassistant/components/open_router/const.py

@@ -0,0 +1,6 @@
"""Constants for the OpenRouter integration."""

import logging

DOMAIN = "open_router"
LOGGER = logging.getLogger(__package__)

homeassistant/components/open_router/conversation.py

@@ -0,0 +1,133 @@
"""Conversation support for OpenRouter."""

from typing import Literal

import openai
from openai.types.chat import (
    ChatCompletionAssistantMessageParam,
    ChatCompletionMessageParam,
    ChatCompletionSystemMessageParam,
    ChatCompletionUserMessageParam,
)

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_LLM_HASS_API, CONF_MODEL, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import OpenRouterConfigEntry
from .const import DOMAIN, LOGGER


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: OpenRouterConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up conversation entities."""
    for subentry_id, subentry in config_entry.subentries.items():
        async_add_entities(
            [OpenRouterConversationEntity(config_entry, subentry)],
            config_subentry_id=subentry_id,
        )


def _convert_content_to_chat_message(
    content: conversation.Content,
) -> ChatCompletionMessageParam | None:
    """Convert any native chat message for this agent to the native format."""
    LOGGER.debug("_convert_content_to_chat_message=%s", content)
    if isinstance(content, conversation.ToolResultContent):
        return None

    role: Literal["user", "assistant", "system"] = content.role
    if role == "system" and content.content:
        return ChatCompletionSystemMessageParam(role="system", content=content.content)

    if role == "user" and content.content:
        return ChatCompletionUserMessageParam(role="user", content=content.content)

    if role == "assistant":
        return ChatCompletionAssistantMessageParam(
            role="assistant", content=content.content
        )

    LOGGER.warning("Could not convert message to Completions API: %s", content)
    return None


class OpenRouterConversationEntity(conversation.ConversationEntity):
    """OpenRouter conversation agent."""

    def __init__(self, entry: OpenRouterConfigEntry, subentry: ConfigSubentry) -> None:
        """Initialize the agent."""
        self.entry = entry
        self.subentry = subentry
        self.model = subentry.data[CONF_MODEL]
        self._attr_name = subentry.title
        self._attr_unique_id = subentry.subentry_id

    @property
    def supported_languages(self) -> list[str] | Literal["*"]:
        """Return a list of supported languages."""
        return MATCH_ALL

    async def _async_handle_message(
        self,
        user_input: conversation.ConversationInput,
        chat_log: conversation.ChatLog,
    ) -> conversation.ConversationResult:
        """Process a sentence."""
        options = self.subentry.data

        try:
            await chat_log.async_provide_llm_data(
                user_input.as_llm_context(DOMAIN),
                options.get(CONF_LLM_HASS_API),
                None,
                user_input.extra_system_prompt,
            )
        except conversation.ConverseError as err:
            return err.as_conversation_result()

        messages = [
            m
            for content in chat_log.content
            if (m := _convert_content_to_chat_message(content))
        ]

        client = self.entry.runtime_data

        try:
            result = await client.chat.completions.create(
                model=self.model,
                messages=messages,
                user=chat_log.conversation_id,
                extra_headers={
                    "X-Title": "Home Assistant",
                    "HTTP-Referer": "https://www.home-assistant.io/integrations/open_router",
                },
            )
        except openai.OpenAIError as err:
            LOGGER.error("Error talking to API: %s", err)
            raise HomeAssistantError("Error talking to API") from err

        result_message = result.choices[0].message

        chat_log.async_add_assistant_content_without_tools(
            conversation.AssistantContent(
                agent_id=user_input.agent_id,
                content=result_message.content,
            )
        )

        intent_response = intent.IntentResponse(language=user_input.language)
        assert type(chat_log.content[-1]) is conversation.AssistantContent
        intent_response.async_set_speech(chat_log.content[-1].content or "")
        return conversation.ConversationResult(
            response=intent_response,
            conversation_id=chat_log.conversation_id,
            continue_conversation=chat_log.continue_conversation,
        )
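The resulting entity behaves like any other conversation agent. A minimal sketch of invoking it from code, assuming an agent entity id of conversation.gpt_3_5_turbo as used in the tests at the end of this change:

from homeassistant.components import conversation
from homeassistant.core import Context, HomeAssistant


async def ask_open_router(hass: HomeAssistant, text: str) -> str:
    """Sketch: send a sentence to the OpenRouter agent and return its reply."""
    result = await conversation.async_converse(
        hass,
        text,
        None,  # conversation_id: None starts a new conversation
        Context(),
        agent_id="conversation.gpt_3_5_turbo",  # illustrative entity id
    )
    return result.response.speech["plain"]["speech"]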

homeassistant/components/open_router/manifest.json

@@ -0,0 +1,13 @@
{
  "domain": "open_router",
  "name": "OpenRouter",
  "after_dependencies": ["assist_pipeline", "intent"],
  "codeowners": ["@joostlek"],
  "config_flow": true,
  "dependencies": ["conversation"],
  "documentation": "https://www.home-assistant.io/integrations/open_router",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["openai==1.93.3", "python-open-router==0.2.0"]
}

homeassistant/components/open_router/quality_scale.yaml

@@ -0,0 +1,88 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: No actions are implemented
  appropriate-polling:
    status: exempt
    comment: the integration does not poll
  brands: done
  common-modules:
    status: exempt
    comment: the integration currently implements only one platform and has no coordinator
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: No actions are implemented
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: the integration does not subscribe to events
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: the integration has no options
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: the integration only implements a stateless conversation entity.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: the integration only integrates state-less entities
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: Service can't be discovered
  discovery:
    status: exempt
    comment: Service can't be discovered
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: todo
  entity-device-class:
    status: exempt
    comment: no suitable device class for the conversation entity
  entity-disabled-by-default:
    status: exempt
    comment: only one conversation entity
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: the integration has no repairs
  stale-devices:
    status: exempt
    comment: only one device per entry, is deleted with the entry.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done

homeassistant/components/open_router/strings.json

@@ -0,0 +1,37 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]"
        },
        "data_description": {
          "api_key": "An OpenRouter API key"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
    }
  },
  "config_subentries": {
    "conversation": {
      "step": {
        "user": {
          "description": "Configure the new conversation agent",
          "data": {
            "model": "Model"
          }
        }
      },
      "initiate_flow": {
        "user": "Add conversation agent"
      },
      "entry_type": "Conversation agent"
    }
  }
}

homeassistant/generated/config_flows.py

@@ -449,6 +449,7 @@ FLOWS = {
        "onkyo",
        "onvif",
        "open_meteo",
        "open_router",
        "openai_conversation",
        "openexchangerates",
        "opengarage",

homeassistant/generated/integrations.json

@@ -4621,6 +4621,12 @@
    "config_flow": true,
    "iot_class": "cloud_polling"
  },
  "open_router": {
    "name": "OpenRouter",
    "integration_type": "service",
    "config_flow": true,
    "iot_class": "cloud_polling"
  },
  "openai_conversation": {
    "name": "OpenAI Conversation",
    "integration_type": "service",

mypy.ini (generated)

@@ -3526,6 +3526,16 @@ disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true

[mypy-homeassistant.components.open_router.*]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_decorators = true
disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true

[mypy-homeassistant.components.openai_conversation.*]
check_untyped_defs = true
disallow_incomplete_defs = true

requirements_all.txt (generated)

@@ -1596,6 +1596,7 @@ open-garage==0.2.0
# homeassistant.components.open_meteo
open-meteo==0.3.2

# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==1.93.3

@@ -2476,6 +2477,9 @@ python-mpd2==3.1.1

# homeassistant.components.mystrom
python-mystrom==2.4.0

# homeassistant.components.open_router
python-open-router==0.2.0

# homeassistant.components.swiss_public_transport
python-opendata-transport==0.5.0

requirements_test_all.txt (generated)

@@ -1364,6 +1364,7 @@ open-garage==0.2.0
# homeassistant.components.open_meteo
open-meteo==0.3.2

# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==1.93.3

@@ -2049,6 +2050,9 @@ python-mpd2==3.1.1

# homeassistant.components.mystrom
python-mystrom==2.4.0

# homeassistant.components.open_router
python-open-router==0.2.0

# homeassistant.components.swiss_public_transport
python-opendata-transport==0.5.0

tests/components/open_router/__init__.py

@@ -0,0 +1,13 @@
"""Tests for the OpenRouter integration."""

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
    """Fixture for setting up the component."""
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

tests/components/open_router/conftest.py

@@ -0,0 +1,128 @@
"""Fixtures for OpenRouter integration tests."""

from collections.abc import AsyncGenerator, Generator
from dataclasses import dataclass
from unittest.mock import AsyncMock, MagicMock, patch

from openai.types import CompletionUsage
from openai.types.chat import ChatCompletion, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
import pytest

from homeassistant.components.open_router.const import DOMAIN
from homeassistant.config_entries import ConfigSubentryData
from homeassistant.const import CONF_API_KEY, CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.open_router.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        yield mock_setup_entry


@pytest.fixture
def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
    """Mock a config entry."""
    return MockConfigEntry(
        title="OpenRouter",
        domain=DOMAIN,
        data={
            CONF_API_KEY: "bla",
        },
        subentries_data=[
            ConfigSubentryData(
                data={CONF_MODEL: "gpt-3.5-turbo"},
                subentry_id="ABCDEF",
                subentry_type="conversation",
                title="GPT-3.5 Turbo",
                unique_id=None,
            )
        ],
    )


@dataclass
class Model:
    """Mock model data."""

    id: str
    name: str


@pytest.fixture
async def mock_openai_client() -> AsyncGenerator[AsyncMock]:
    """Initialize integration."""
    with (
        patch("homeassistant.components.open_router.AsyncOpenAI") as mock_client,
        patch(
            "homeassistant.components.open_router.config_flow.AsyncOpenAI",
            new=mock_client,
        ),
    ):
        client = mock_client.return_value
        client.with_options = MagicMock()
        client.with_options.return_value.models = MagicMock()
        client.with_options.return_value.models.list.return_value = (
            get_generator_from_data(
                [
                    Model(id="gpt-4", name="GPT-4"),
                    Model(id="gpt-3.5-turbo", name="GPT-3.5 Turbo"),
                ],
            )
        )
        client.chat.completions.create = AsyncMock(
            return_value=ChatCompletion(
                id="chatcmpl-1234567890ABCDEFGHIJKLMNOPQRS",
                choices=[
                    Choice(
                        finish_reason="stop",
                        index=0,
                        message=ChatCompletionMessage(
                            content="Hello, how can I help you?",
                            role="assistant",
                            function_call=None,
                            tool_calls=None,
                        ),
                    )
                ],
                created=1700000000,
                model="gpt-3.5-turbo-0613",
                object="chat.completion",
                system_fingerprint=None,
                usage=CompletionUsage(
                    completion_tokens=9, prompt_tokens=8, total_tokens=17
                ),
            )
        )
        yield client


@pytest.fixture
async def mock_open_router_client() -> AsyncGenerator[AsyncMock]:
    """Initialize integration."""
    with patch(
        "homeassistant.components.open_router.config_flow.OpenRouterClient",
        autospec=True,
    ) as mock_client:
        client = mock_client.return_value
        yield client


@pytest.fixture(autouse=True)
async def setup_ha(hass: HomeAssistant) -> None:
    """Set up Home Assistant."""
    assert await async_setup_component(hass, "homeassistant", {})


async def get_generator_from_data[DataT](items: list[DataT]) -> AsyncGenerator[DataT]:
    """Return async generator."""
    for item in items:
        yield item

tests/components/open_router/snapshots/test_conversation.ambr

@@ -0,0 +1,16 @@
# serializer version: 1
# name: test_default_prompt
  list([
    dict({
      'attachments': None,
      'content': 'hello',
      'role': 'user',
    }),
    dict({
      'agent_id': 'conversation.gpt_3_5_turbo',
      'content': 'Hello, how can I help you?',
      'role': 'assistant',
      'tool_calls': None,
    }),
  ])
# ---

tests/components/open_router/test_config_flow.py

@@ -0,0 +1,146 @@
"""Test the OpenRouter config flow."""

from unittest.mock import AsyncMock

import pytest
from python_open_router import OpenRouterError

from homeassistant.components.open_router.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER, ConfigSubentry
from homeassistant.const import CONF_API_KEY, CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from . import setup_integration

from tests.common import MockConfigEntry


async def test_full_flow(
    hass: HomeAssistant,
    mock_open_router_client: AsyncMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test the full config flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_API_KEY: "bla"}
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["data"] == {CONF_API_KEY: "bla"}


@pytest.mark.parametrize(
    ("exception", "error"),
    [
        (OpenRouterError("exception"), "cannot_connect"),
        (Exception, "unknown"),
    ],
)
async def test_form_errors(
    hass: HomeAssistant,
    mock_open_router_client: AsyncMock,
    mock_setup_entry: AsyncMock,
    exception: Exception,
    error: str,
) -> None:
    """Test we handle errors from the OpenRouter API."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    mock_open_router_client.get_key_data.side_effect = exception

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_API_KEY: "bla"},
    )

    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": error}

    mock_open_router_client.get_key_data.side_effect = None

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_API_KEY: "bla"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY


async def test_duplicate_entry(
    hass: HomeAssistant,
    mock_open_router_client: AsyncMock,
    mock_setup_entry: AsyncMock,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test aborting the flow if an entry already exists."""
    mock_config_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_API_KEY: "bla"},
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"


async def test_create_conversation_agent(
    hass: HomeAssistant,
    mock_open_router_client: AsyncMock,
    mock_openai_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test creating a conversation agent."""
    mock_config_entry.add_to_hass(hass)
    await setup_integration(hass, mock_config_entry)

    result = await hass.config_entries.subentries.async_init(
        (mock_config_entry.entry_id, "conversation"),
        context={"source": SOURCE_USER},
    )
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]
    assert result["step_id"] == "user"

    assert result["data_schema"].schema["model"].config["options"] == [
        {"value": "gpt-3.5-turbo", "label": "GPT-3.5 Turbo"},
    ]

    result = await hass.config_entries.subentries.async_configure(
        result["flow_id"],
        {CONF_MODEL: "gpt-3.5-turbo"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY

    subentry_id = list(mock_config_entry.subentries)[0]
    assert (
        ConfigSubentry(
            data={CONF_MODEL: "gpt-3.5-turbo"},
            subentry_id=subentry_id,
            subentry_type="conversation",
            title="GPT-3.5 Turbo",
            unique_id=None,
        )
        in mock_config_entry.subentries.values()
    )

tests/components/open_router/test_conversation.py

@@ -0,0 +1,52 @@
"""Tests for the OpenRouter integration."""

from unittest.mock import AsyncMock

from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components import conversation
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import area_registry as ar, device_registry as dr, intent

from . import setup_integration

from tests.common import MockConfigEntry
from tests.components.conversation import MockChatLog, mock_chat_log  # noqa: F401


@pytest.fixture(autouse=True)
def freeze_the_time():
    """Freeze the time."""
    with freeze_time("2024-05-24 12:00:00", tz_offset=0):
        yield


async def test_default_prompt(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    area_registry: ar.AreaRegistry,
    device_registry: dr.DeviceRegistry,
    snapshot: SnapshotAssertion,
    mock_openai_client: AsyncMock,
    mock_chat_log: MockChatLog,  # noqa: F811
) -> None:
    """Test that the default prompt works."""
    await setup_integration(hass, mock_config_entry)

    result = await conversation.async_converse(
        hass,
        "hello",
        mock_chat_log.conversation_id,
        Context(),
        agent_id="conversation.gpt_3_5_turbo",
    )

    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
    assert mock_chat_log.content[1:] == snapshot

    call = mock_openai_client.chat.completions.create.call_args_list[0][1]
    assert call["model"] == "gpt-3.5-turbo"
    assert call["extra_headers"] == {
        "HTTP-Referer": "https://www.home-assistant.io/integrations/open_router",
        "X-Title": "Home Assistant",
    }