Add prompt as constant and common translation key (#148896)

Joost Lekkerkerker 2025-07-16 18:23:38 +02:00 committed by GitHub
parent e2340314c6
commit a5f0f6c8b9
6 changed files with 8 additions and 6 deletions
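
The change has two parts: a shared CONF_PROMPT constant added to homeassistant/const.py, and a common translation key (common::config_flow::data::prompt, "Instructions") that each integration's strings.json now references instead of repeating the literal label. As a rough sketch of how an integration could pick up the shared constant in an options schema (the schema and field set below are hypothetical and not part of this commit):

```python
# Hypothetical options schema for a conversation integration, shown only to
# illustrate importing the shared CONF_PROMPT constant added by this commit.
import voluptuous as vol

from homeassistant.const import CONF_NAME, CONF_PROMPT

OPTIONS_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): str,
        # "prompt" now comes from homeassistant.const instead of a
        # per-integration CONF_PROMPT = "prompt" definition.
        vol.Optional(CONF_PROMPT): str,
    }
)
```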

View File

@@ -29,7 +29,7 @@
"set_options": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"prompt": "Instructions",
"prompt": "[%key:common::config_flow::data::prompt%]",
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature",

View File

@@ -34,7 +34,7 @@
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "Recommended model settings",
"prompt": "Instructions",
"prompt": "[%key:common::config_flow::data::prompt%]",
"chat_model": "[%key:common::generic::model%]",
"temperature": "Temperature",
"top_p": "Top P",
@@ -72,7 +72,7 @@
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::recommended%]",
"prompt": "Instructions",
"prompt": "[%key:common::config_flow::data::prompt%]",
"chat_model": "[%key:common::generic::model%]",
"temperature": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::temperature%]",
"top_p": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_p%]",

View File

@@ -28,7 +28,7 @@
"data": {
"model": "Model",
"name": "[%key:common::config_flow::data::name%]",
"prompt": "Instructions",
"prompt": "[%key:common::config_flow::data::prompt%]",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"max_history": "Max history messages",
"num_ctx": "Context window size",
@@ -67,7 +67,7 @@
"data": {
"model": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::model%]",
"name": "[%key:common::config_flow::data::name%]",
"prompt": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::prompt%]",
"prompt": "[%key:common::config_flow::data::prompt%]",
"max_history": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::max_history%]",
"num_ctx": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::num_ctx%]",
"keep_alive": "[%key:component::ollama::config_subentries::conversation::step::set_options::data::keep_alive%]",

View File

@@ -28,7 +28,7 @@
"init": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"prompt": "Instructions",
"prompt": "[%key:common::config_flow::data::prompt%]",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"recommended": "Recommended model settings"
},

View File

@@ -245,6 +245,7 @@ CONF_PLATFORM: Final = "platform"
CONF_PORT: Final = "port"
CONF_PREFIX: Final = "prefix"
CONF_PROFILE_NAME: Final = "profile_name"
+CONF_PROMPT: Final = "prompt"
CONF_PROTOCOL: Final = "protocol"
CONF_PROXY_SSL: Final = "proxy_ssl"
CONF_QUOTE: Final = "quote"

View File

@@ -65,6 +65,7 @@
"path": "Path",
"pin": "PIN code",
"port": "Port",
"prompt": "Instructions",
"ssl": "Uses an SSL certificate",
"url": "URL",
"usb_path": "USB device path",