Drop user prompt from LLMContext (#146787)

This commit is contained in:
Paulus Schoutsen 2025-06-13 22:01:39 -04:00 committed by GitHub
parent 56aa809074
commit 059c12798d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 12 additions and 19 deletions

View File

@@ -395,7 +395,6 @@ class ChatLog:
         llm_context = llm.LLMContext(
             platform=conversing_domain,
             context=user_input.context,
-            user_prompt=user_input.text,
             language=user_input.language,
             assistant=DOMAIN,
             device_id=user_input.device_id,

View File

@@ -88,7 +88,6 @@ class ModelContextProtocolSSEView(HomeAssistantView):
         context = llm.LLMContext(
             platform=DOMAIN,
             context=self.context(request),
-            user_prompt=None,
             language="*",
             assistant=conversation.DOMAIN,
             device_id=None,

View File

@@ -160,11 +160,19 @@ class LLMContext:
     """Tool input to be processed."""

     platform: str
+    """Integration that is handling the LLM request."""
+
     context: Context | None
-    user_prompt: str | None
+    """Context of the LLM request."""
+
     language: str | None
+    """Language of the LLM request."""
+
     assistant: str | None
+    """Assistant domain that is handling the LLM request."""
+
     device_id: str | None
+    """Device that is making the request."""


 @dataclass(slots=True)
@@ -302,7 +310,7 @@ class IntentTool(Tool):
             platform=llm_context.platform,
             intent_type=self.name,
             slots=slots,
-            text_input=llm_context.user_prompt,
+            text_input=None,
             context=llm_context.context,
             language=llm_context.language,
             assistant=llm_context.assistant,

View File

@@ -415,7 +415,6 @@ async def test_function_call(
         llm.LLMContext(
             platform="anthropic",
             context=context,
-            user_prompt="Please call the test function",
             language="en",
             assistant="conversation",
             device_id=None,
@@ -510,7 +509,6 @@ async def test_function_exception(
         llm.LLMContext(
             platform="anthropic",
             context=context,
-            user_prompt="Please call the test function",
             language="en",
             assistant="conversation",
             device_id=None,

View File

@@ -58,7 +58,6 @@ def create_llm_context() -> llm.LLMContext:
     return llm.LLMContext(
         platform="test_platform",
         context=Context(),
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,

View File

@@ -284,7 +284,6 @@ async def test_function_call(
         llm.LLMContext(
             platform="ollama",
             context=context,
-            user_prompt="Please call the test function",
             language="en",
             assistant="conversation",
             device_id=None,
@@ -369,7 +368,6 @@ async def test_function_exception(
         llm.LLMContext(
             platform="ollama",
             context=context,
-            user_prompt="Please call the test function",
             language="en",
             assistant="conversation",
             device_id=None,

View File

@@ -36,7 +36,6 @@ def llm_context() -> llm.LLMContext:
     return llm.LLMContext(
         platform="",
         context=None,
-        user_prompt=None,
        language=None,
         assistant=None,
         device_id=None,
@@ -162,7 +161,6 @@ async def test_assist_api(
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=test_context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -237,7 +235,7 @@ async def test_assist_api(
             "area": {"value": "kitchen"},
             "floor": {"value": "ground_floor"},
         },
-        text_input="test_text",
+        text_input=None,
         context=test_context,
         language="*",
         assistant="conversation",
@@ -296,7 +294,7 @@ async def test_assist_api(
             "preferred_area_id": {"value": area.id},
             "preferred_floor_id": {"value": floor.floor_id},
         },
-        text_input="test_text",
+        text_input=None,
         context=test_context,
         language="*",
         assistant="conversation",
@@ -412,7 +410,6 @@ async def test_assist_api_prompt(
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -760,7 +757,6 @@ async def test_script_tool(
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -961,7 +957,6 @@ async def test_script_tool_name(hass: HomeAssistant) -> None:
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -1241,7 +1236,6 @@ async def test_calendar_get_events_tool(hass: HomeAssistant) -> None:
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -1344,7 +1338,6 @@ async def test_todo_get_items_tool(hass: HomeAssistant) -> None:
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,
@@ -1451,7 +1444,6 @@ async def test_no_tools_exposed(hass: HomeAssistant) -> None:
     llm_context = llm.LLMContext(
         platform="test_platform",
         context=context,
-        user_prompt="test_text",
         language="*",
         assistant="conversation",
         device_id=None,