Update OpenAI prompt on each interaction (#118747)

Paulus Schoutsen authored on 2024-06-03 16:27:05 -04:00; committed by GitHub
parent 8ea3a6843a
commit 299c0de968
2 changed files with 93 additions and 53 deletions

homeassistant/components/openai_conversation/conversation.py

@@ -146,14 +146,13 @@ class OpenAIConversationEntity(
             messages = self.history[conversation_id]
         else:
             conversation_id = ulid.ulid_now()
+            messages = []
 
         if (
             user_input.context
             and user_input.context.user_id
             and (
-                user := await self.hass.auth.async_get_user(
-                    user_input.context.user_id
-                )
+                user := await self.hass.auth.async_get_user(user_input.context.user_id)
             )
         ):
             user_name = user.name
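The condition above uses an assignment expression to fetch and test the user in a single check. A minimal, self-contained sketch of that pattern, with a hypothetical get_user() standing in for hass.auth.async_get_user():

import asyncio


async def get_user(user_id: str) -> str | None:
    # Hypothetical stand-in for hass.auth.async_get_user(): returns a user
    # name for a known id, or None when the id is unknown.
    return {"abc123": "Paulus"}.get(user_id)


async def main() -> None:
    user_name: str | None = None
    user_id = "abc123"
    # The walrus operator assigns the awaited result and tests its truthiness
    # in one condition, so the branch only runs when a user was found.
    if user_id and (user := await get_user(user_id)):
        user_name = user
    print(user_name)  # -> Paulus


asyncio.run(main())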
@@ -193,11 +192,12 @@ class OpenAIConversationEntity(
                 response=intent_response, conversation_id=conversation_id
             )
 
-        messages = [ChatCompletionSystemMessageParam(role="system", content=prompt)]
-
-        messages.append(
-            ChatCompletionUserMessageParam(role="user", content=user_input.text)
-        )
+        # Create a copy of the variable because we attach it to the trace
+        messages = [
+            ChatCompletionSystemMessageParam(role="system", content=prompt),
+            *messages[1:],
+            ChatCompletionUserMessageParam(role="user", content=user_input.text),
+        ]
 
         LOGGER.debug("Prompt: %s", messages)
         trace.async_conversation_trace_append(
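The hunk above is the core of the change: instead of appending to one mutable list, the message list is rebuilt on every turn, splicing a freshly rendered system prompt in front of the retained history and the new user message. A minimal sketch of that pattern, using plain dicts and a hypothetical render_prompt() in place of the integration's template rendering:

from datetime import date


def render_prompt() -> str:
    # Hypothetical stand-in for the template-rendered system prompt; in the
    # integration this is where time-sensitive details (like today's date)
    # come from, which is why it must be re-rendered on every interaction.
    return f"Answer in plain text. Today's date is {date.today().isoformat()}."


def build_messages(
    history: list[dict[str, str]], user_text: str
) -> list[dict[str, str]]:
    # Build a new list rather than mutating `history`: the previous list is
    # attached to the conversation trace, and the stale system message at
    # index 0 (if any) is replaced by a freshly rendered one.
    return [
        {"role": "system", "content": render_prompt()},
        *history[1:],
        {"role": "user", "content": user_text},
    ]


history: list[dict[str, str]] = []
messages = build_messages(history, "Please call the test function")
history = messages  # what the integration stores back into self.history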

tests/components/openai_conversation/test_conversation.py

@@ -2,6 +2,7 @@
 
 from unittest.mock import AsyncMock, Mock, patch
 
+from freezegun import freeze_time
 from httpx import Response
 from openai import RateLimitError
 from openai.types.chat.chat_completion import ChatCompletion, Choice
@@ -214,11 +215,14 @@ async def test_function_call(
         ),
     )
 
-    with patch(
-        "openai.resources.chat.completions.AsyncCompletions.create",
-        new_callable=AsyncMock,
-        side_effect=completion_result,
-    ) as mock_create:
+    with (
+        patch(
+            "openai.resources.chat.completions.AsyncCompletions.create",
+            new_callable=AsyncMock,
+            side_effect=completion_result,
+        ) as mock_create,
+        freeze_time("2024-06-03 23:00:00"),
+    ):
         result = await conversation.async_converse(
             hass,
             "Please call the test function",
@@ -227,6 +231,11 @@ async def test_function_call(
             agent_id=agent_id,
         )
 
+    assert (
+        "Today's date is 2024-06-03."
+        in mock_create.mock_calls[1][2]["messages"][0]["content"]
+    )
+
     assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
     assert mock_create.mock_calls[1][2]["messages"][3] == {
         "role": "tool",
@@ -262,6 +271,37 @@ async def test_function_call(
     # AGENT_DETAIL event contains the raw prompt passed to the model
     detail_event = trace_events[1]
     assert "Answer in plain text" in detail_event["data"]["messages"][0]["content"]
+    assert (
+        "Today's date is 2024-06-03."
+        in trace_events[1]["data"]["messages"][0]["content"]
+    )
+
+    # Call it again, make sure we have updated prompt
+    with (
+        patch(
+            "openai.resources.chat.completions.AsyncCompletions.create",
+            new_callable=AsyncMock,
+            side_effect=completion_result,
+        ) as mock_create,
+        freeze_time("2024-06-04 23:00:00"),
+    ):
+        result = await conversation.async_converse(
+            hass,
+            "Please call the test function",
+            None,
+            context,
+            agent_id=agent_id,
+        )
+
+    assert (
+        "Today's date is 2024-06-04."
+        in mock_create.mock_calls[1][2]["messages"][0]["content"]
+    )
+
+    # Test old assert message not updated
+    assert (
+        "Today's date is 2024-06-03."
+        in trace_events[1]["data"]["messages"][0]["content"]
+    )
 
 
 @patch(
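The new test code pins the clock with freezegun so the rendered prompt contains a deterministic date, then repeats the conversation a day later to prove the prompt is refreshed each turn. A standalone sketch of that testing pattern, with a hypothetical render_prompt() rather than the Home Assistant agent:

from datetime import date

from freezegun import freeze_time


def render_prompt() -> str:
    # Hypothetical prompt renderer that embeds the current date, mirroring the
    # "Today's date is ..." line the test above asserts on.
    return f"Today's date is {date.today().isoformat()}."


def test_prompt_picks_up_the_current_date() -> None:
    with freeze_time("2024-06-03 23:00:00"):
        first = render_prompt()
    with freeze_time("2024-06-04 23:00:00"):
        second = render_prompt()

    # Because the prompt is re-rendered on each turn, a later turn sees the
    # new date while the earlier result keeps the old one.
    assert "Today's date is 2024-06-03." in first
    assert "Today's date is 2024-06-04." in second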