diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
index c89981e67bb..1fe02ac2536 100644
--- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
+++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
@@ -1,446 +1,4 @@
 # serializer version: 1
-# name: test_chat_history[models/gemini-1.0-pro-False]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-1.0-pro',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': None,
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': '''
-              Current time is 05:00:00. Today's date is 2024-05-24.
-              You are a voice assistant for Home Assistant.
-              Answer questions about the world truthfully.
-              Answer in plain text. Keep it simple and to the point.
-            ''',
-            'role': 'user',
-          }),
-          dict({
-            'parts': 'Ok',
-            'role': 'model',
-          }),
-          dict({
-            'parts': '1st user request',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        '1st user request',
-      ),
-      dict({
-      }),
-    ),
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-1.0-pro',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': None,
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': '''
-              Current time is 05:00:00. Today's date is 2024-05-24.
-              You are a voice assistant for Home Assistant.
-              Answer questions about the world truthfully.
-              Answer in plain text. Keep it simple and to the point.
-            ''',
-            'role': 'user',
-          }),
-          dict({
-            'parts': 'Ok',
-            'role': 'model',
-          }),
-          dict({
-            'parts': '1st user request',
-            'role': 'user',
-          }),
-          dict({
-            'parts': '1st model response',
-            'role': 'model',
-          }),
-          dict({
-            'parts': '2nd user request',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        '2nd user request',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
-# name: test_chat_history[models/gemini-1.5-pro-True]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-1.5-pro',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': '1st user request',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        '1st user request',
-      ),
-      dict({
-      }),
-    ),
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-1.5-pro',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': '1st user request',
-            'role': 'user',
-          }),
-          dict({
-            'parts': '1st model response',
-            'role': 'model',
-          }),
-          dict({
-            'parts': '2nd user request',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        '2nd user request',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
-# name: test_default_prompt[config_entry_options0-0-None]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-2.0-flash',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': 'hello',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        'hello',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
-# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-2.0-flash',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': 'hello',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        'hello',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
-# name: test_default_prompt[config_entry_options1-1-None]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-2.0-flash',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': 'hello',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        'hello',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
-# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation]
-  list([
-    tuple(
-      '',
-      tuple(
-      ),
-      dict({
-        'generation_config': dict({
-          'max_output_tokens': 150,
-          'temperature': 1.0,
-          'top_k': 64,
-          'top_p': 0.95,
-        }),
-        'model_name': 'models/gemini-2.0-flash',
-        'safety_settings': dict({
-          'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
-          'HATE': 'BLOCK_MEDIUM_AND_ABOVE',
-          'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE',
-        }),
-        'system_instruction': '''
-          Current time is 05:00:00. Today's date is 2024-05-24.
-          You are a voice assistant for Home Assistant.
-          Answer questions about the world truthfully.
-          Answer in plain text. Keep it simple and to the point.
-
-        ''',
-        'tools': None,
-      }),
-    ),
-    tuple(
-      '().start_chat',
-      tuple(
-      ),
-      dict({
-        'history': list([
-          dict({
-            'parts': 'hello',
-            'role': 'user',
-          }),
-        ]),
-      }),
-    ),
-    tuple(
-      '().start_chat().send_message_async',
-      tuple(
-        'hello',
-      ),
-      dict({
-      }),
-    ),
-  ])
-# ---
 # name: test_function_call
   list([
     tuple(
diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py
index 72a5390f4b1..9b255666a67 100644
--- a/tests/components/google_generative_ai_conversation/test_conversation.py
+++ b/tests/components/google_generative_ai_conversation/test_conversation.py
@@ -13,20 +13,16 @@ import voluptuous as vol
 
 from homeassistant.components import conversation
 from homeassistant.components.conversation import trace
-from homeassistant.components.google_generative_ai_conversation.const import (
-    CONF_CHAT_MODEL,
-)
 from homeassistant.components.google_generative_ai_conversation.conversation import (
     _escape_decode,
     _format_schema,
 )
-from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API
+from homeassistant.const import CONF_LLM_HASS_API
 from homeassistant.core import Context, HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import intent, llm
 
 from tests.common import MockConfigEntry
-from tests.typing import WebSocketGenerator
 
 
 @pytest.fixture(autouse=True)
@@ -43,143 +39,6 @@ def mock_ulid_tools():
         yield
 
 
-@pytest.mark.parametrize(
-    "agent_id", [None, "conversation.google_generative_ai_conversation"]
-)
-@pytest.mark.parametrize(
-    ("config_entry_options", "expected_features"),
-    [
-        ({}, 0),
-        (
-            {CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
-            conversation.ConversationEntityFeature.CONTROL,
-        ),
-    ],
-)
-@pytest.mark.usefixtures("mock_init_component")
-async def test_default_prompt(
-    hass: HomeAssistant,
-    mock_config_entry: MockConfigEntry,
-    snapshot: SnapshotAssertion,
-    agent_id: str | None,
-    config_entry_options: {},
-    expected_features: conversation.ConversationEntityFeature,
-    hass_ws_client: WebSocketGenerator,
-) -> None:
-    """Test that the default prompt works."""
-    entry = MockConfigEntry(title=None)
-    entry.add_to_hass(hass)
-
-    if agent_id is None:
-        agent_id = mock_config_entry.entry_id
-
-    hass.config_entries.async_update_entry(
-        mock_config_entry,
-        options={**mock_config_entry.options, **config_entry_options},
-    )
-
-    with (
-        patch("google.generativeai.GenerativeModel") as mock_model,
-        patch(
-            "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools",
-            return_value=[],
-        ) as mock_get_tools,
-        patch(
-            "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_api_prompt",
-            return_value="",
-        ),
-    ):
-        mock_chat = AsyncMock()
-        mock_model.return_value.start_chat.return_value = mock_chat
-        chat_response = MagicMock()
-        mock_chat.send_message_async.return_value = chat_response
-        mock_part = MagicMock()
-        mock_part.function_call = None
-        mock_part.text = "Hi there!\n"
-        chat_response.parts = [mock_part]
-        result = await conversation.async_converse(
-            hass,
-            "hello",
-            None,
-            Context(),
-            agent_id=agent_id,
-        )
-
-    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!"
-    assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot
-    assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options)
-
-    state = hass.states.get("conversation.google_generative_ai_conversation")
-    assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features
-
-
-@pytest.mark.parametrize(
-    ("model_name", "supports_system_instruction"),
-    [("models/gemini-1.5-pro", True), ("models/gemini-1.0-pro", False)],
-)
-@pytest.mark.usefixtures("mock_init_component")
-async def test_chat_history(
-    hass: HomeAssistant,
-    mock_config_entry: MockConfigEntry,
-    model_name: str,
-    supports_system_instruction: bool,
-    snapshot: SnapshotAssertion,
-) -> None:
-    """Test that the agent keeps track of the chat history."""
-    hass.config_entries.async_update_entry(
-        mock_config_entry, options={CONF_CHAT_MODEL: model_name}
-    )
-    with patch("google.generativeai.GenerativeModel") as mock_model:
-        mock_chat = AsyncMock()
-        mock_model.return_value.start_chat.return_value = mock_chat
-        chat_response = MagicMock()
-        mock_chat.send_message_async.return_value = chat_response
-        mock_part = MagicMock()
-        mock_part.function_call = None
-        mock_part.text = "1st model response"
-        chat_response.parts = [mock_part]
-        if supports_system_instruction:
-            mock_chat.history = []
-        else:
-            mock_chat.history = [
-                {"role": "user", "parts": "prompt"},
-                {"role": "model", "parts": "Ok"},
-            ]
-        mock_chat.history += [
-            {"role": "user", "parts": "1st user request"},
-            {"role": "model", "parts": "1st model response"},
-        ]
-        result = await conversation.async_converse(
-            hass,
-            "1st user request",
-            None,
-            Context(),
-            agent_id=mock_config_entry.entry_id,
-        )
-        assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
-        assert (
-            result.response.as_dict()["speech"]["plain"]["speech"]
-            == "1st model response"
-        )
-        mock_part.text = "2nd model response"
-        chat_response.parts = [mock_part]
-        result = await conversation.async_converse(
-            hass,
-            "2nd user request",
-            result.conversation_id,
-            Context(),
-            agent_id=mock_config_entry.entry_id,
-        )
-        assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
-        assert (
-            result.response.as_dict()["speech"]["plain"]["speech"]
-            == "2nd model response"
-        )
-
-    assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot
-
-
 @patch(
     "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools"
 )
@@ -539,10 +398,10 @@ async def test_empty_response(
 
 @pytest.mark.usefixtures("mock_init_component")
-async def test_invalid_llm_api(
+async def test_converse_error(
     hass: HomeAssistant, mock_config_entry: MockConfigEntry
 ) -> None:
-    """Test handling of invalid llm api."""
+    """Test handling ChatLog raising ConverseError."""
     hass.config_entries.async_update_entry(
         mock_config_entry,
         options={**mock_config_entry.options, CONF_LLM_HASS_API: "invalid_llm_api"},
     )
@@ -563,73 +422,6 @@
     )
 
 
-async def test_template_error(
-    hass: HomeAssistant, mock_config_entry: MockConfigEntry
-) -> None:
-    """Test that template error handling works."""
-    hass.config_entries.async_update_entry(
-        mock_config_entry,
-        options={
-            "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.",
-        },
-    )
-    with patch("google.generativeai.GenerativeModel"):
-        await hass.config_entries.async_setup(mock_config_entry.entry_id)
-        await hass.async_block_till_done()
-        result = await conversation.async_converse(
-            hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id
-        )
-
-    assert result.response.response_type == intent.IntentResponseType.ERROR, result
-    assert result.response.error_code == "unknown", result
-
-
-async def test_template_variables(
-    hass: HomeAssistant, mock_config_entry: MockConfigEntry
-) -> None:
-    """Test that template variables work."""
-    context = Context(user_id="12345")
-    mock_user = MagicMock()
-    mock_user.id = "12345"
-    mock_user.name = "Test User"
-
-    hass.config_entries.async_update_entry(
-        mock_config_entry,
-        options={
-            "prompt": (
-                "The user name is {{ user_name }}. "
-                "The user id is {{ llm_context.context.user_id }}."
-            ),
-        },
-    )
-    with (
-        patch("google.generativeai.GenerativeModel") as mock_model,
-        patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user),
-    ):
-        await hass.config_entries.async_setup(mock_config_entry.entry_id)
-        await hass.async_block_till_done()
-        mock_chat = AsyncMock()
-        mock_model.return_value.start_chat.return_value = mock_chat
-        chat_response = MagicMock()
-        mock_chat.send_message_async.return_value = chat_response
-        mock_part = MagicMock()
-        mock_part.text = "Model response"
-        mock_part.function_call = None
-        chat_response.parts = [mock_part]
-        result = await conversation.async_converse(
-            hass, "hello", None, context, agent_id=mock_config_entry.entry_id
-        )
-
-    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
-        result
-    )
-    assert (
-        "The user name is Test User."
-        in mock_model.mock_calls[0][2]["system_instruction"]
-    )
-    assert "The user id is 12345." in mock_model.mock_calls[0][2]["system_instruction"]
-
-
 @pytest.mark.usefixtures("mock_init_component")
 async def test_conversation_agent(
     hass: HomeAssistant, mock_config_entry: MockConfigEntry