Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 13:17:32 +00:00)

Commit 0226a6fecd: Merge branch 'dev' into prepare_protobuf6
@@ -20,7 +20,7 @@
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.5",
"bluetooth-data-tools==1.26.1",
"dbus-fast==2.41.1",
"dbus-fast==2.43.0",
"habluetooth==3.37.0"
]
}
@@ -245,6 +245,10 @@ class CloudLoginView(HomeAssistantView):
name = "api:cloud:login"

@require_admin
async def post(self, request: web.Request) -> web.Response:
"""Handle login request."""
return await self._post(request)

@_handle_cloud_errors
@RequestDataValidator(
vol.Schema(
@@ -259,7 +263,7 @@ class CloudLoginView(HomeAssistantView):
)
)
)
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
"""Handle login request."""
hass = request.app[KEY_HASS]
cloud = hass.data[DATA_CLOUD]
@@ -316,8 +320,12 @@ class CloudLogoutView(HomeAssistantView):
name = "api:cloud:logout"

@require_admin
@_handle_cloud_errors
async def post(self, request: web.Request) -> web.Response:
"""Handle logout request."""
return await self._post(request)

@_handle_cloud_errors
async def _post(self, request: web.Request) -> web.Response:
"""Handle logout request."""
hass = request.app[KEY_HASS]
cloud = hass.data[DATA_CLOUD]
@@ -400,9 +408,13 @@ class CloudForgotPasswordView(HomeAssistantView):
name = "api:cloud:forgot_password"

@require_admin
async def post(self, request: web.Request) -> web.Response:
"""Handle forgot password request."""
return await self._post(request)

@_handle_cloud_errors
@RequestDataValidator(vol.Schema({vol.Required("email"): str}))
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
"""Handle forgot password request."""
hass = request.app[KEY_HASS]
cloud = hass.data[DATA_CLOUD]
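The three cloud views above are refactored so that the admin-protected post handler is just a thin wrapper around an undecorated _post implementation, which the onboarding integration reuses later in this commit without the admin requirement. A minimal sketch of the pattern, using a made-up ExampleLoginView and URL rather than the actual cloud code:

    from typing import Any

    import voluptuous as vol
    from aiohttp import web

    from homeassistant.components.http import HomeAssistantView
    from homeassistant.components.http.data_validator import RequestDataValidator
    from homeassistant.components.http.decorators import require_admin


    class ExampleLoginView(HomeAssistantView):
        """Admin-guarded endpoint whose body is reusable by subclasses."""

        url = "/api/example/login"
        name = "api:example:login"

        @require_admin
        async def post(self, request: web.Request) -> web.Response:
            """Public entry point: enforce admin, then delegate to _post."""
            return await self._post(request)

        @RequestDataValidator(vol.Schema({vol.Required("email"): str}))
        async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
            """Do the actual work; a subclass can call this without the admin check."""
            return self.json({"email": data["email"]})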
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.23"]
}
@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pydeconz"],
"requirements": ["pydeconz==118"],
"requirements": ["pydeconz==120"],
"ssdp": [
{
"manufacturer": "Royal Philips Electronics",
@@ -7,7 +7,7 @@
},
"recorder_untracked": {
"title": "Entity not tracked",
"description": "The recorder has been configured to exclude these configured entities:"
"description": "Home Assistant Recorder has been configured to exclude these configured entities:"
},
"entity_unavailable": {
"title": "Entity unavailable",
@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["gassist-text==0.0.11"],
"requirements": ["gassist-text==0.0.12"],
"single_config_entry": true
}
@@ -44,6 +44,7 @@ from .const import (
CONF_TEMPERATURE,
CONF_TOP_K,
CONF_TOP_P,
CONF_USE_GOOGLE_SEARCH_TOOL,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -51,6 +52,7 @@ from .const import (
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_K,
RECOMMENDED_TOP_P,
RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
TIMEOUT_MILLIS,
)

@@ -341,6 +343,13 @@ async def google_generative_ai_config_option_schema(
},
default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
): harm_block_thresholds_selector,
vol.Optional(
CONF_USE_GOOGLE_SEARCH_TOOL,
description={
"suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
},
default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
): bool,
}
)
return schema
@@ -22,5 +22,7 @@ CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold"
CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold"
CONF_DANGEROUS_BLOCK_THRESHOLD = "dangerous_block_threshold"
RECOMMENDED_HARM_BLOCK_THRESHOLD = "BLOCK_MEDIUM_AND_ABOVE"
CONF_USE_GOOGLE_SEARCH_TOOL = "enable_google_search_tool"
RECOMMENDED_USE_GOOGLE_SEARCH_TOOL = False

TIMEOUT_MILLIS = 10000
@@ -4,6 +4,7 @@ from __future__ import annotations

import codecs
from collections.abc import Callable
from dataclasses import replace
from typing import Any, Literal, cast

from google.genai.errors import APIError
@@ -12,6 +13,7 @@ from google.genai.types import (
Content,
FunctionDeclaration,
GenerateContentConfig,
GoogleSearch,
HarmCategory,
Part,
SafetySetting,
@@ -39,6 +41,7 @@ from .const import (
CONF_TEMPERATURE,
CONF_TOP_K,
CONF_TOP_P,
CONF_USE_GOOGLE_SEARCH_TOOL,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
@@ -296,6 +299,13 @@ class GoogleGenerativeAIConversationEntity(
for tool in chat_log.llm_api.tools
]

# Using search grounding allows the model to retrieve information from the web,
# however, it may interfere with how the model decides to use some tools, or entities
# for example weather entity may be disregarded if the model chooses to Google it.
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))

model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# Gemini 1.0 doesn't support system_instruction while 1.5 does.
# Assume future versions will support it (if not, the request fails with a
@@ -324,6 +334,14 @@ class GoogleGenerativeAIConversationEntity(
tool_results.append(chat_content)
continue

if (
not isinstance(chat_content, conversation.ToolResultContent)
and chat_content.content == ""
):
# Skipping is not possible since the number of function calls need to match the number of function responses
# and skipping one would mean removing the other and hence this would prevent a proper chat log
chat_content = replace(chat_content, content=" ")

if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
tool_results.clear()
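As the hunk above shows, search grounding is opt-in: only when the new CONF_USE_GOOGLE_SEARCH_TOOL option is enabled does the entity append a google_search Tool to the function-calling tools exposed by the LLM API. A small sketch of that wiring in isolation (build_tools is a hypothetical helper name; the option key and the Tool/GoogleSearch types are the ones used in the diff):

    from google.genai.types import GoogleSearch, Tool

    CONF_USE_GOOGLE_SEARCH_TOOL = "enable_google_search_tool"


    def build_tools(options: dict, llm_tools: list[Tool] | None) -> list[Tool] | None:
        """Return the tool list, optionally extended with Google Search grounding."""
        tools = list(llm_tools) if llm_tools else None
        if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
            tools = tools or []
            # Grounding can compete with local tools (e.g. a weather entity),
            # which is why it stays behind an option.
            tools.append(Tool(google_search=GoogleSearch()))
        return tools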
@@ -36,7 +36,8 @@
"harassment_block_threshold": "Negative or harmful comments targeting identity and/or protected attributes",
"hate_block_threshold": "Content that is rude, disrespectful, or profane",
"sexual_block_threshold": "Contains references to sexual acts or other lewd content",
"dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts"
"dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts",
"enable_google_search_tool": "Enable Google Search tool"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
@@ -2,7 +2,7 @@
"services": {
"virtualkey": {
"name": "Virtual key",
"description": "Presses a virtual key from CCU/Homegear or simulate keypress.",
"description": "Simulates a keypress (or other valid action) on CCU/Homegear with virtual or device keys.",
"fields": {
"address": {
"name": "Address",
@@ -24,7 +24,7 @@
},
"set_variable_value": {
"name": "Set variable value",
"description": "Sets the name of a node.",
"description": "Sets the value of a system variable.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2025.3.1"]
"requirements": ["aioautomower==2025.3.2"]
}
@@ -44,7 +44,7 @@ async def async_set_work_area_cutting_height(
) -> None:
"""Set cutting height for work area."""
await coordinator.api.commands.workarea_settings(
mower_id, int(cheight), work_area_id
mower_id, work_area_id, cutting_height=int(cheight)
)
@@ -17,6 +17,7 @@ from homeassistant.config_entries import (
SubentryFlowResult,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv

from . import DOMAIN

@@ -80,30 +81,30 @@ class OptionsFlowHandler(OptionsFlow):
if user_input is not None:
return self.async_create_entry(data=self.config_entry.options | user_input)

return self.async_show_form(
step_id="options_1",
data_schema=vol.Schema(
{
vol.Required("section_1"): data_entry_flow.section(
vol.Schema(
{
vol.Optional(
CONF_BOOLEAN,
default=self.config_entry.options.get(
CONF_BOOLEAN, False
),
): bool,
vol.Optional(
CONF_INT,
default=self.config_entry.options.get(CONF_INT, 10),
): int,
}
),
{"collapsed": False},
data_schema = vol.Schema(
{
vol.Required("section_1"): data_entry_flow.section(
vol.Schema(
{
vol.Optional(
CONF_BOOLEAN,
default=self.config_entry.options.get(
CONF_BOOLEAN, False
),
): bool,
vol.Optional(CONF_INT): cv.positive_int,
}
),
}
),
{"collapsed": False},
),
}
)
self.add_suggested_values_to_schema(
data_schema,
{"section_1": {"int": self.config_entry.options.get(CONF_INT, 10)}},
)

return self.async_show_form(step_id="options_1", data_schema=data_schema)


class SubentryFlowHandler(ConfigSubentryFlow):
@@ -146,7 +147,7 @@ class SubentryFlowHandler(ConfigSubentryFlow):
if user_input is not None:
title = user_input.pop("name")
return self.async_update_and_abort(
self._get_reconfigure_entry(),
self._get_entry(),
self._get_reconfigure_subentry(),
data=user_input,
title=title,
@@ -4,13 +4,13 @@ from collections.abc import Callable, Coroutine
from typing import Any, Concatenate

from linkplay.bridge import LinkPlayBridge
from linkplay.manufacturers import MANUFACTURER_GENERIC, get_info_from_project

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity import Entity

from . import DOMAIN, LinkPlayRequestException
from .utils import MANUFACTURER_GENERIC, get_info_from_project


def exception_wrap[_LinkPlayEntityT: LinkPlayBaseEntity, **_P, _R](
@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["linkplay"],
|
||||
"requirements": ["python-linkplay==0.2.0"],
|
||||
"requirements": ["python-linkplay==0.2.1"],
|
||||
"zeroconf": ["_linkplay._tcp.local."]
|
||||
}
|
||||
|
@@ -1,7 +1,5 @@
"""Utilities for the LinkPlay component."""

from typing import Final

from aiohttp import ClientSession
from linkplay.utils import async_create_unverified_client_session

@@ -10,75 +8,6 @@ from homeassistant.core import Event, HomeAssistant, callback

from .const import DATA_SESSION, DOMAIN

MANUFACTURER_ARTSOUND: Final[str] = "ArtSound"
MANUFACTURER_ARYLIC: Final[str] = "Arylic"
MANUFACTURER_IEAST: Final[str] = "iEAST"
MANUFACTURER_WIIM: Final[str] = "WiiM"
MANUFACTURER_GGMM: Final[str] = "GGMM"
MANUFACTURER_MEDION: Final[str] = "Medion"
MANUFACTURER_GENERIC: Final[str] = "Generic"
MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP"
MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde"
MODELS_ARYLIC_S50: Final[str] = "S50+"
MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro"
MODELS_ARYLIC_A30: Final[str] = "A30"
MODELS_ARYLIC_A50: Final[str] = "A50"
MODELS_ARYLIC_A50S: Final[str] = "A50+"
MODELS_ARYLIC_UP2STREAM_AMP: Final[str] = "Up2Stream Amp 2.0"
MODELS_ARYLIC_UP2STREAM_AMP_2P1: Final[str] = "Up2Stream Amp 2.1"
MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3"
MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4"
MODELS_ARYLIC_UP2STREAM_PRO: Final[str] = "Up2Stream Pro v1"
MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3"
MODELS_ARYLIC_S10P: Final[str] = "Arylic S10+"
MODELS_ARYLIC_UP2STREAM_PLATE_AMP: Final[str] = "Up2Stream Plate Amp"
MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5"
MODELS_WIIM_AMP: Final[str] = "WiiM Amp"
MODELS_WIIM_MINI: Final[str] = "WiiM Mini"
MODELS_GGMM_GGMM_E2: Final[str] = "GGMM E2"
MODELS_MEDION_MD_43970: Final[str] = "Life P66970 (MD 43970)"
MODELS_GENERIC: Final[str] = "Generic"

PROJECTID_LOOKUP: Final[dict[str, tuple[str, str]]] = {
"SMART_ZONE4_AMP": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4),
"SMART_HYDE": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE),
"ARYLIC_S50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50),
"RP0016_S50PRO_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO),
"RP0011_WB60_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30),
"X-50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50),
"ARYLIC_A50S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S),
"RP0011_WB60": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP),
"UP2STREAM_AMP_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3),
"UP2STREAM_AMP_V4": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4),
"UP2STREAM_PRO_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3),
"S10P_WIFI": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S10P),
"ARYLIC_V20": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PLATE_AMP),
"UP2STREAM_MINI_V3": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"UP2STREAM_AMP_2P1": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_2P1),
"RP0014_A50C_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"ARYLIC_A30": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"ARYLIC_SUBWOOFER": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"ARYLIC_S50A": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0010_D5_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0001": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0013_WA31S": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0010_D5": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0013_WA31S_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"RP0014_A50D_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"ARYLIC_A50TE": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"ARYLIC_A50N": (MANUFACTURER_ARYLIC, MODELS_GENERIC),
"iEAST-02": (MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5),
"WiiM_Amp_4layer": (MANUFACTURER_WIIM, MODELS_WIIM_AMP),
"Muzo_Mini": (MANUFACTURER_WIIM, MODELS_WIIM_MINI),
"GGMM_E2A": (MANUFACTURER_GGMM, MODELS_GGMM_GGMM_E2),
"A16": (MANUFACTURER_MEDION, MODELS_MEDION_MD_43970),
}


def get_info_from_project(project: str) -> tuple[str, str]:
"""Get manufacturer and model info based on given project."""
return PROJECTID_LOOKUP.get(project, (MANUFACTURER_GENERIC, MODELS_GENERIC))


async def async_get_client_session(hass: HomeAssistant) -> ClientSession:
"""Get a ClientSession that can be used with LinkPlay devices."""
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/mcp",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["mcp==1.1.2"]
"requirements": ["mcp==1.5.0"]
}
@@ -8,6 +8,6 @@
"integration_type": "service",
"iot_class": "local_push",
"quality_scale": "silver",
"requirements": ["mcp==1.1.2", "aiohttp_sse==2.2.0", "anyio==4.9.0"],
"requirements": ["mcp==1.5.0", "aiohttp_sse==2.2.0", "anyio==4.9.0"],
"single_config_entry": true
}
@@ -52,7 +52,7 @@ async def create_server(
if llm_api_id == STATELESS_LLM_API:
llm_api_id = llm.LLM_API_ASSIST

server = Server("home-assistant")
server = Server[Any]("home-assistant")

async def get_api_instance() -> llm.APIInstance:
"""Get the LLM API selected."""
@@ -1176,7 +1176,7 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Save the changes made to the subentry."""
entry = self._get_reconfigure_entry()
entry = self._get_entry()
subentry = self._get_reconfigure_subentry()
entity_registry = er.async_get(self.hass)
@@ -4,7 +4,7 @@ from __future__ import annotations

import logging

from aiohttp.client_exceptions import ClientConnectorError, ClientError
from aiohttp.client_exceptions import ClientError
from nettigo_air_monitor import (
ApiError,
AuthFailedError,
@@ -38,15 +38,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: NAMConfigEntry) -> bool:
options = ConnectionOptions(host=host, username=username, password=password)
try:
nam = await NettigoAirMonitor.create(websession, options)
except (ApiError, ClientError, ClientConnectorError, TimeoutError) as err:
raise ConfigEntryNotReady from err
except (ApiError, ClientError) as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="device_communication_error",
translation_placeholders={"device": entry.title},
) from err

try:
await nam.async_check_credentials()
except ApiError as err:
raise ConfigEntryNotReady from err
except (ApiError, ClientError) as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="device_communication_error",
translation_placeholders={"device": entry.title},
) from err
except AuthFailedError as err:
raise ConfigEntryAuthFailed from err
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={"device": entry.title},
) from err

coordinator = NAMDataUpdateCoordinator(hass, entry, nam)
await coordinator.async_config_entry_first_refresh()
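The NAM setup above swaps bare ConfigEntryNotReady/ConfigEntryAuthFailed raises for translation-keyed exceptions whose user-facing messages live in the integration's strings.json (added further down in this diff). A compact sketch of the pattern; raise_not_ready is just an illustrative helper, while the domain and translation key mirror the diff:

    from homeassistant.exceptions import ConfigEntryNotReady

    DOMAIN = "nam"


    def raise_not_ready(device_name: str, err: Exception) -> None:
        """Raise a ConfigEntryNotReady whose message is resolved from strings.json."""
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="device_communication_error",
            translation_placeholders={"device": device_name},
        ) from err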
@@ -4,6 +4,9 @@ from __future__ import annotations

import logging

from aiohttp.client_exceptions import ClientError
from nettigo_air_monitor import ApiError, AuthFailedError

from homeassistant.components.button import (
ButtonDeviceClass,
ButtonEntity,
@@ -11,9 +14,11 @@ from homeassistant.components.button import (
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import NAMConfigEntry, NAMDataUpdateCoordinator

PARALLEL_UPDATES = 1
@@ -59,4 +64,16 @@ class NAMButton(CoordinatorEntity[NAMDataUpdateCoordinator], ButtonEntity):

async def async_press(self) -> None:
"""Triggers the restart."""
await self.coordinator.nam.async_restart()
try:
await self.coordinator.nam.async_restart()
except (ApiError, ClientError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="device_communication_action_error",
translation_placeholders={
"entity": self.entity_id,
"device": self.coordinator.config_entry.title,
},
) from err
except AuthFailedError:
self.coordinator.config_entry.async_start_reauth(self.hass)
@@ -64,6 +64,10 @@ class NAMDataUpdateCoordinator(DataUpdateCoordinator[NAMSensors]):
# We do not need to catch AuthFailed exception here because sensor data is
# always available without authorization.
except (ApiError, InvalidSensorDataError, RetryError) as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={"device": self.config_entry.title},
) from error

return data
@@ -205,5 +205,19 @@
"name": "Last restart"
}
}
},
"exceptions": {
"auth_error": {
"message": "Authentication failed for {device}, please update your credentials"
},
"device_communication_error": {
"message": "An error occurred while communicating with {device}"
},
"device_communication_action_error": {
"message": "An error occurred while calling action for {entity} for {device}"
},
"update_error": {
"message": "An error occurred while retrieving data from {device}"
}
}
}
@@ -17,6 +17,8 @@ from .entity import NUTBaseEntity

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


async def async_setup_entry(
hass: HomeAssistant,
@@ -1,5 +1,10 @@
{
"entity": {
"button": {
"outlet_number_load_cycle": {
"default": "mdi:restart"
}
},
"sensor": {
"ambient_humidity_status": {
"default": "mdi:information-outline"
@@ -152,11 +157,6 @@
"default": "mdi:information-outline"
}
},
"button": {
"outlet_number_load_cycle": {
"default": "mdi:restart"
}
},
"switch": {
"outlet_number_load_poweronoff": {
"default": "mdi:power"
(One file diff suppressed because it is too large.)
@@ -29,8 +29,8 @@
},
"error": {
"cannot_connect": "Connection error: {error}",
"unknown": "[%key:common::config_flow::error::unknown%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
@@ -78,6 +78,9 @@
}
},
"entity": {
"button": {
"outlet_number_load_cycle": { "name": "Power cycle outlet {outlet_name}" }
},
"sensor": {
"ambient_humidity": { "name": "Ambient humidity" },
"ambient_humidity_status": { "name": "Ambient humidity status" },
@@ -106,43 +109,40 @@
"battery_voltage_low": { "name": "Low battery voltage" },
"battery_voltage_nominal": { "name": "Nominal battery voltage" },
"input_bypass_current": { "name": "Input bypass current" },
"input_bypass_l1_current": { "name": "Input bypass L1 current" },
"input_bypass_l2_current": { "name": "Input bypass L2 current" },
"input_bypass_l3_current": { "name": "Input bypass L3 current" },
"input_bypass_voltage": { "name": "Input bypass voltage" },
"input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" },
"input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" },
"input_bypass_l3_n_voltage": { "name": "Input bypass L3-N voltage" },
"input_bypass_frequency": { "name": "Input bypass frequency" },
"input_bypass_l1_current": { "name": "Input bypass L1 current" },
"input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" },
"input_bypass_l1_realpower": { "name": "Input bypass L1 real power" },
"input_bypass_l2_current": { "name": "Input bypass L2 current" },
"input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" },
"input_bypass_l2_realpower": { "name": "Input bypass L2 real power" },
"input_bypass_l3_current": { "name": "Input bypass L3 current" },
"input_bypass_l3_n_voltage": { "name": "Input bypass L3-N voltage" },
"input_bypass_l3_realpower": { "name": "Input bypass L3 real power" },
"input_bypass_phases": { "name": "Input bypass phases" },
"input_bypass_realpower": { "name": "Input bypass real power" },
"input_bypass_l1_realpower": {
"name": "Input bypass L1 real power"
},
"input_bypass_l2_realpower": {
"name": "Input bypass L2 real power"
},
"input_bypass_l3_realpower": {
"name": "Input bypass L3 real power"
},
"input_bypass_voltage": { "name": "Input bypass voltage" },
"input_current": { "name": "Input current" },
"input_current_status": { "name": "Input current status" },
"input_l1_current": { "name": "Input L1 current" },
"input_l2_current": { "name": "Input L2 current" },
"input_l3_current": { "name": "Input L3 current" },
"input_frequency": { "name": "Input frequency" },
"input_frequency_nominal": { "name": "Input nominal frequency" },
"input_frequency_status": { "name": "Input frequency status" },
"input_l1_current": { "name": "Input L1 current" },
"input_l1_frequency": { "name": "Input L1 line frequency" },
"input_l1_n_voltage": { "name": "Input L1 voltage" },
"input_l1_realpower": { "name": "Input L1 real power" },
"input_l2_current": { "name": "Input L2 current" },
"input_l2_frequency": { "name": "Input L2 line frequency" },
"input_l2_n_voltage": { "name": "Input L2 voltage" },
"input_l2_realpower": { "name": "Input L2 real power" },
"input_l3_current": { "name": "Input L3 current" },
"input_l3_frequency": { "name": "Input L3 line frequency" },
"input_l3_n_voltage": { "name": "Input L3 voltage" },
"input_l3_realpower": { "name": "Input L3 real power" },
"input_load": { "name": "Input load" },
"input_phases": { "name": "Input phases" },
"input_power": { "name": "Input power" },
"input_realpower": { "name": "Input real power" },
"input_l1_realpower": { "name": "Input L1 real power" },
"input_l2_realpower": { "name": "Input L2 real power" },
"input_l3_realpower": { "name": "Input L3 real power" },
"input_load": { "name": "Input load" },
"input_sensitivity": { "name": "Input power sensitivity" },
"input_transfer_high": { "name": "High voltage transfer" },
"input_transfer_low": { "name": "Low voltage transfer" },
@@ -150,9 +150,6 @@
"input_voltage": { "name": "Input voltage" },
"input_voltage_nominal": { "name": "Nominal input voltage" },
"input_voltage_status": { "name": "Input voltage status" },
"input_l1_n_voltage": { "name": "Input L1 voltage" },
"input_l2_n_voltage": { "name": "Input L2 voltage" },
"input_l3_n_voltage": { "name": "Input L3 voltage" },
"outlet_number_current": { "name": "Outlet {outlet_name} current" },
"outlet_number_current_status": {
"name": "Outlet {outlet_name} current status"
@@ -163,27 +160,27 @@
"outlet_voltage": { "name": "Outlet voltage" },
"output_current": { "name": "Output current" },
"output_current_nominal": { "name": "Nominal output current" },
"output_l1_current": { "name": "Output L1 current" },
"output_l2_current": { "name": "Output L2 current" },
"output_l3_current": { "name": "Output L3 current" },
"output_frequency": { "name": "Output frequency" },
"output_frequency_nominal": { "name": "Nominal output frequency" },
"output_l1_current": { "name": "Output L1 current" },
"output_l1_n_voltage": { "name": "Output L1-N voltage" },
"output_l1_power_percent": { "name": "Output L1 power usage" },
"output_l1_realpower": { "name": "Output L1 real power" },
"output_l2_current": { "name": "Output L2 current" },
"output_l2_n_voltage": { "name": "Output L2-N voltage" },
"output_l2_power_percent": { "name": "Output L2 power usage" },
"output_l2_realpower": { "name": "Output L2 real power" },
"output_l3_current": { "name": "Output L3 current" },
"output_l3_n_voltage": { "name": "Output L3-N voltage" },
"output_l3_power_percent": { "name": "Output L3 power usage" },
"output_l3_realpower": { "name": "Output L3 real power" },
"output_phases": { "name": "Output phases" },
"output_power": { "name": "Output apparent power" },
"output_l2_power_percent": { "name": "Output L2 power usage" },
"output_l1_power_percent": { "name": "Output L1 power usage" },
"output_l3_power_percent": { "name": "Output L3 power usage" },
"output_power_nominal": { "name": "Nominal output power" },
"output_realpower": { "name": "Output real power" },
"output_realpower_nominal": { "name": "Nominal output real power" },
"output_l1_realpower": { "name": "Output L1 real power" },
"output_l2_realpower": { "name": "Output L2 real power" },
"output_l3_realpower": { "name": "Output L3 real power" },
"output_voltage": { "name": "Output voltage" },
"output_voltage_nominal": { "name": "Nominal output voltage" },
"output_l1_n_voltage": { "name": "Output L1-N voltage" },
"output_l2_n_voltage": { "name": "Output L2-N voltage" },
"output_l3_n_voltage": { "name": "Output L3-N voltage" },
"ups_alarm": { "name": "Alarms" },
"ups_beeper_status": { "name": "Beeper status" },
"ups_contacts": { "name": "External contacts" },
@@ -218,9 +215,6 @@
"ups_watchdog_status": { "name": "Watchdog status" },
"watts": { "name": "Watts" }
},
"button": {
"outlet_number_load_cycle": { "name": "Power cycle outlet {outlet_name}" }
},
"switch": {
"outlet_number_load_poweronoff": { "name": "Power outlet {outlet_name}" }
}
@@ -18,6 +18,8 @@ from .entity import NUTBaseEntity

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


async def async_setup_entry(
hass: HomeAssistant,
@@ -60,6 +60,7 @@ async def async_setup(
hass.http.register_view(BackupInfoView(data))
hass.http.register_view(RestoreBackupView(data))
hass.http.register_view(UploadBackupView(data))
setup_cloud_views(hass, data)


class OnboardingView(HomeAssistantView):
@@ -429,6 +430,115 @@ class UploadBackupView(BackupOnboardingView, backup_http.UploadBackupView):
return await self._post(request)


def setup_cloud_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
"""Set up the cloud views."""

# The cloud integration is imported locally to avoid cloud being imported by
# bootstrap.py and to avoid circular imports.

# pylint: disable-next=import-outside-toplevel
from homeassistant.components.cloud import http_api as cloud_http

# pylint: disable-next=import-outside-toplevel,hass-component-root-import
from homeassistant.components.cloud.const import DATA_CLOUD

class CloudOnboardingView(HomeAssistantView):
"""Cloud onboarding view."""

requires_auth = False

def __init__(self, data: OnboardingStoreData) -> None:
"""Initialize the view."""
self._data = data

def with_cloud[_ViewT: CloudOnboardingView, **_P](
func: Callable[
Concatenate[_ViewT, web.Request, _P],
Coroutine[Any, Any, web.Response],
],
) -> Callable[
Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]
]:
"""Home Assistant API decorator to check onboarding and cloud."""

@wraps(func)
async def _with_cloud(
self: _ViewT,
request: web.Request,
*args: _P.args,
**kwargs: _P.kwargs,
) -> web.Response:
"""Check onboarding status, cloud and call function."""
if self._data["done"]:
# If at least one onboarding step is done, we don't allow accessing
# the cloud onboarding views.
raise HTTPUnauthorized

hass = request.app[KEY_HASS]
if DATA_CLOUD not in hass.data:
return self.json(
{"code": "cloud_disabled"},
status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
)

return await func(self, request, *args, **kwargs)

return _with_cloud

class CloudForgotPasswordView(
CloudOnboardingView, cloud_http.CloudForgotPasswordView
):
"""View to start Forgot Password flow."""

url = "/api/onboarding/cloud/forgot_password"
name = "api:onboarding:cloud:forgot_password"

@with_cloud
async def post(self, request: web.Request) -> web.Response:
"""Handle forgot password request."""
return await super()._post(request)

class CloudLoginView(CloudOnboardingView, cloud_http.CloudLoginView):
"""Login to Home Assistant Cloud."""

url = "/api/onboarding/cloud/login"
name = "api:onboarding:cloud:login"

@with_cloud
async def post(self, request: web.Request) -> web.Response:
"""Handle login request."""
return await super()._post(request)

class CloudLogoutView(CloudOnboardingView, cloud_http.CloudLogoutView):
"""Log out of the Home Assistant cloud."""

url = "/api/onboarding/cloud/logout"
name = "api:onboarding:cloud:logout"

@with_cloud
async def post(self, request: web.Request) -> web.Response:
"""Handle logout request."""
return await super()._post(request)

class CloudStatusView(CloudOnboardingView):
"""Get cloud status view."""

url = "/api/onboarding/cloud/status"
name = "api:onboarding:cloud:status"

@with_cloud
async def get(self, request: web.Request) -> web.Response:
"""Return cloud status."""
hass = request.app[KEY_HASS]
cloud = hass.data[DATA_CLOUD]
return self.json({"logged_in": cloud.is_logged_in})

hass.http.register_view(CloudForgotPasswordView(data))
hass.http.register_view(CloudLoginView(data))
hass.http.register_view(CloudLogoutView(data))
hass.http.register_view(CloudStatusView(data))


@callback
def _async_get_hass_provider(hass: HomeAssistant) -> HassAuthProvider:
"""Get the Home Assistant auth provider."""
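The new onboarding cloud views are unauthenticated (requires_auth = False) but refuse to serve once any onboarding step is done, and they answer HTTP 500 with {"code": "cloud_disabled"} while the cloud integration is not loaded. A rough client-side sketch of polling the status endpoint during onboarding; the host is a placeholder, and the URL and response shape are taken from the code above:

    import asyncio

    import aiohttp


    async def cloud_login_status(host: str = "http://homeassistant.local:8123") -> bool:
        """Return True if Home Assistant Cloud reports a logged-in state during onboarding."""
        async with aiohttp.ClientSession() as session:
            async with session.get(f"{host}/api/onboarding/cloud/status") as resp:
                # 401 once onboarding is done, 500 with {"code": "cloud_disabled"} if cloud is absent.
                if resp.status != 200:
                    return False
                data = await resp.json()
                return bool(data.get("logged_in"))


    # asyncio.run(cloud_login_status())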
@@ -140,14 +140,14 @@
"device_selection": "[%key:component::onewire::options::error::device_not_selected%]"
},
"description": "Select what configuration steps to process",
"title": "OneWire Device Options"
"title": "1-Wire device options"
},
"configure_device": {
"data": {
"precision": "Sensor Precision"
"precision": "Sensor precision"
},
"description": "Select sensor precision for {sensor_id}",
"title": "OneWire Sensor Precision"
"title": "1-Wire sensor precision"
}
}
}
@@ -43,15 +43,15 @@
"fields": {
"entity_id": {
"name": "Entities to remove",
"description": "List of entities for which the data is to be removed from the recorder database."
"description": "List of entities for which the data is to be removed from the Recorder database."
},
"domains": {
"name": "Domains to remove",
"description": "List of domains for which the data needs to be removed from the recorder database."
"description": "List of domains for which the data needs to be removed from the Recorder database."
},
"entity_globs": {
"name": "Entity globs to remove",
"description": "List of glob patterns used to select the entities for which the data is to be removed from the recorder database."
"description": "List of glob patterns used to select the entities for which the data is to be removed from the Recorder database."
},
"keep_days": {
"name": "[%key:component::recorder::services::purge::fields::keep_days::name%]",
@@ -24,7 +24,7 @@
"event_handled": "Send handled events",
"event_third_party_packages": "Send events from third-party packages",
"logging_event_level": "The log level Sentry will register an event for",
"logging_level": "The log level Sentry will record logs as breadcrums for",
"logging_level": "The log level Sentry will record events as breadcrumbs for",
"tracing": "Enable performance tracing",
"tracing_sample_rate": "Tracing sample rate; between 0.0 and 1.0 (1.0 = 100%)"
}
@@ -25,7 +25,7 @@
},
"zeroconf_confirm": {
"title": "Confirm setup for Slide",
"description": "Do you want to setup {host}?"
"description": "Do you want to set up {host}?"
}
},
"abort": {
@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from pysmartthings import Attribute, Capability, Category, SmartThings
@@ -35,6 +36,8 @@ class SmartThingsBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_key: str
category_device_class: dict[Category | str, BinarySensorDeviceClass] | None = None
category: set[Category] | None = None
exists_fn: Callable[[str], bool] | None = None
component_translation_key: dict[str, str] | None = None


CAPABILITY_TO_SENSORS: dict[
@@ -58,6 +61,11 @@ CAPABILITY_TO_SENSORS: dict[
Category.DOOR: BinarySensorDeviceClass.DOOR,
Category.WINDOW: BinarySensorDeviceClass.WINDOW,
},
exists_fn=lambda key: key in {"freezer", "cooler"},
component_translation_key={
"freezer": "freezer_door",
"cooler": "cooler_door",
},
)
},
Capability.FILTER_STATUS: {
@@ -164,17 +172,18 @@ async def async_setup_entry(
entry_data = entry.runtime_data
async_add_entities(
SmartThingsBinarySensor(
entry_data.client,
device,
description,
capability,
attribute,
entry_data.client, device, description, capability, attribute, component
)
for device in entry_data.devices.values()
for capability, attribute_map in CAPABILITY_TO_SENSORS.items()
if capability in device.status[MAIN]
for attribute, description in attribute_map.items()
if (
for component in device.status
if capability in device.status[component]
and (
component == MAIN
or (description.exists_fn is not None and description.exists_fn(component))
)
and (
not description.category
or get_main_component_category(device) in description.category
)
@@ -193,9 +202,10 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity):
entity_description: SmartThingsBinarySensorEntityDescription,
capability: Capability,
attribute: Attribute,
component: str,
) -> None:
"""Init the class."""
super().__init__(client, device, {capability})
super().__init__(client, device, {capability}, component=component)
self._attribute = attribute
self.capability = capability
self.entity_description = entity_description
@@ -207,6 +217,19 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity):
):
self._attr_device_class = entity_description.category_device_class[category]
self._attr_name = None
if (
entity_description.component_translation_key is not None
and (
translation_key := entity_description.component_translation_key.get(
component
)
)
is not None
):
self._attr_translation_key = translation_key
self._attr_unique_id = (
f"{device.device.device_id}_{component}_{capability}_{attribute}"
)

@property
def is_on(self) -> bool:
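The new exists_fn and component_translation_key fields above let a single capability fan out into per-component entities, such as the freezer and cooler doors of a fridge. A simplified stand-in showing how such a description is filled in; the dataclass here is illustrative, not the real SmartThingsBinarySensorEntityDescription:

    from __future__ import annotations

    from collections.abc import Callable
    from dataclasses import dataclass


    @dataclass(frozen=True, kw_only=True)
    class ExampleBinarySensorDescription:
        """Minimal stand-in for SmartThingsBinarySensorEntityDescription."""

        key: str
        is_on_key: str
        exists_fn: Callable[[str], bool] | None = None
        component_translation_key: dict[str, str] | None = None


    CONTACT_DOOR = ExampleBinarySensorDescription(
        key="contactSensor",
        is_on_key="open",
        # Only create entities for these non-main components...
        exists_fn=lambda component: component in {"freezer", "cooler"},
        # ...and give each one its own translated name.
        component_translation_key={"freezer": "freezer_door", "cooler": "cooler_door"},
    )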
@@ -39,6 +39,12 @@
"filter_status": {
"name": "Filter status"
},
"freezer_door": {
"name": "Freezer door"
},
"cooler_door": {
"name": "Cooler door"
},
"remote_control": {
"name": "Remote control"
},
@@ -51,7 +57,7 @@
},
"button": {
"stop": {
"name": "Stop"
"name": "[%key:common::action::stop%]"
}
},
"event": {
@@ -73,7 +73,6 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = {
),
"temperature": SensorEntityDescription(
key="temperature",
name=None,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.TEMPERATURE,
@@ -48,13 +48,13 @@
"name": "Last boot"
},
"load_15m": {
"name": "Load (15m)"
"name": "Load (15 min)"
},
"load_1m": {
"name": "Load (1m)"
"name": "Load (1 min)"
},
"load_5m": {
"name": "Load (5m)"
"name": "Load (5 min)"
},
"memory_free": {
"name": "Memory free"
@@ -18,7 +18,7 @@
"location": "[%key:common::config_flow::data::location%]"
},
"data_description": {
"location": "Equal or part of name, description or camera id. Be as specific as possible to avoid getting multiple cameras as result"
"location": "Equal or part of name, description or camera ID. Be as specific as possible to avoid getting multiple cameras as result"
}
},
"multiple_cameras": {
@@ -60,7 +60,7 @@
"name": "[%key:common::config_flow::data::location%]"
},
"photo_url": {
"name": "Photo url"
"name": "Photo URL"
},
"status": {
"name": "Status"
@@ -87,7 +87,7 @@
"name": "Photo time"
},
"photo_url": {
"name": "Photo url"
"name": "Photo URL"
},
"status": {
"name": "Status"
@@ -195,8 +195,6 @@ DISCOVERY_SOURCES = {
SOURCE_ZEROCONF,
}

RECONFIGURE_NOTIFICATION_ID = "config_entry_reconfigure"

EVENT_FLOW_DISCOVERED = "config_entry_discovered"

SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"](
@@ -1714,16 +1712,6 @@ class ConfigEntriesFlowManager(
# Create notification.
if source in DISCOVERY_SOURCES:
await self._discovery_debouncer.async_call()
elif source == SOURCE_REAUTH:
persistent_notification.async_create(
self.hass,
title="Integration requires reconfiguration",
message=(
"At least one of your integrations requires reconfiguration to "
"continue functioning. [Check it out](/config/integrations)."
),
notification_id=RECONFIGURE_NOTIFICATION_ID,
)

@callback
def _async_discovery(self) -> None:
@@ -3119,29 +3107,6 @@ class ConfigFlow(ConfigEntryBaseFlow):
"""Handle a flow initialized by discovery."""
return await self._async_step_discovery_without_unique_id()

@callback
def async_abort(
self,
*,
reason: str,
description_placeholders: Mapping[str, str] | None = None,
) -> ConfigFlowResult:
"""Abort the config flow."""
# Remove reauth notification if no reauth flows are in progress
if self.source == SOURCE_REAUTH and not any(
ent["flow_id"] != self.flow_id
for ent in self.hass.config_entries.flow.async_progress_by_handler(
self.handler, match_context={"source": SOURCE_REAUTH}
)
):
persistent_notification.async_dismiss(
self.hass, RECONFIGURE_NOTIFICATION_ID
)

return super().async_abort(
reason=reason, description_placeholders=description_placeholders
)

async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfoBleak
) -> ConfigFlowResult:
@@ -3491,18 +3456,14 @@ class ConfigSubentryFlow(
return self.async_abort(reason="reconfigure_successful")

@property
def _reconfigure_entry_id(self) -> str:
"""Return reconfigure entry id."""
if self.source != SOURCE_RECONFIGURE:
raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}")
def _entry_id(self) -> str:
"""Return config entry id."""
return self.handler[0]

@callback
def _get_reconfigure_entry(self) -> ConfigEntry:
"""Return the reconfigure config entry linked to the current context."""
return self.hass.config_entries.async_get_known_entry(
self._reconfigure_entry_id
)
def _get_entry(self) -> ConfigEntry:
"""Return the config entry linked to the current context."""
return self.hass.config_entries.async_get_known_entry(self._entry_id)

@property
def _reconfigure_subentry_id(self) -> str:
@@ -3514,9 +3475,7 @@ class ConfigSubentryFlow(
@callback
def _get_reconfigure_subentry(self) -> ConfigSubentry:
"""Return the reconfigure config subentry linked to the current context."""
entry = self.hass.config_entries.async_get_known_entry(
self._reconfigure_entry_id
)
entry = self.hass.config_entries.async_get_known_entry(self._entry_id)
subentry_id = self._reconfigure_subentry_id
if subentry_id not in entry.subentries:
raise UnknownSubEntry(subentry_id)
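With _get_reconfigure_entry replaced by the source-agnostic _get_entry on ConfigSubentryFlow, a subentry reconfigure step now looks roughly like this (a sketch following the kitchen_sink and MQTT changes in this commit, not a complete flow handler):

    import voluptuous as vol

    from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult


    class ExampleSubentryFlowHandler(ConfigSubentryFlow):
        """Sketch of a subentry reconfigure step after the rename."""

        async def async_step_reconfigure(self, user_input=None) -> SubentryFlowResult:
            """Update the subentry with the submitted data."""
            if user_input is not None:
                return self.async_update_and_abort(
                    self._get_entry(),  # parent config entry, works for any flow source
                    self._get_reconfigure_subentry(),  # the subentry being reconfigured
                    data=user_input,
                )
            return self.async_show_form(step_id="reconfigure", data_schema=vol.Schema({}))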
@@ -657,6 +657,19 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
):
continue

# Process the section schema options
if (
suggested_values is not None
and isinstance(val, section)
and key in suggested_values
):
new_section_key = copy.copy(key)
schema[new_section_key] = val
val.schema = self.add_suggested_values_to_schema(
copy.deepcopy(val.schema), suggested_values[key]
)
continue

new_key = key
if (
suggested_values
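The FlowHandler change above makes add_suggested_values_to_schema descend into data_entry_flow.section blocks, so suggested values can be nested per section instead of being set as schema defaults. A sketch of how an options flow can rely on that, mirroring the kitchen_sink flow earlier in this commit (CONF_INT is a placeholder option key):

    import voluptuous as vol

    from homeassistant import data_entry_flow
    from homeassistant.helpers import config_validation as cv

    CONF_INT = "int"


    def build_options_schema(flow: data_entry_flow.FlowHandler, current_value: int) -> vol.Schema:
        """Build a sectioned schema and inject the section's suggested values."""
        data_schema = vol.Schema(
            {
                vol.Required("section_1"): data_entry_flow.section(
                    vol.Schema({vol.Optional(CONF_INT): cv.positive_int}),
                    {"collapsed": False},
                )
            }
        )
        # Suggested values are nested under the section key.
        return flow.add_suggested_values_to_schema(
            data_schema, {"section_1": {CONF_INT: current_value}}
        )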
@@ -30,7 +30,7 @@ certifi>=2021.5.30
ciso8601==2.3.2
cronsim==2.6
cryptography==44.0.1
dbus-fast==2.41.1
dbus-fast==2.43.0
fnv-hash-fast==1.4.0
go2rtc-client==0.1.2
ha-ffmpeg==3.2.2
@@ -39,7 +39,7 @@ hass-nabucasa==0.94.0
hassil==2.2.3
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250306.0
home-assistant-intents==2025.3.5
home-assistant-intents==2025.3.23
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6
requirements_all.txt (generated; 14 changed lines)
@@ -201,7 +201,7 @@ aioaseko==1.0.0
aioasuswrt==1.4.0

# homeassistant.components.husqvarna_automower
aioautomower==2025.3.1
aioautomower==2025.3.2

# homeassistant.components.azure_devops
aioazuredevops==2.2.1
@@ -744,7 +744,7 @@ datadog==0.15.0
datapoint==0.9.9

# homeassistant.components.bluetooth
dbus-fast==2.41.1
dbus-fast==2.43.0

# homeassistant.components.debugpy
debugpy==1.8.13
@@ -977,7 +977,7 @@ gTTS==2.5.3
gardena-bluetooth==1.6.0

# homeassistant.components.google_assistant_sdk
gassist-text==0.0.11
gassist-text==0.0.12

# homeassistant.components.google
gcal-sync==7.0.0
@@ -1158,7 +1158,7 @@ holidays==0.68
home-assistant-frontend==20250306.0

# homeassistant.components.conversation
home-assistant-intents==2025.3.5
home-assistant-intents==2025.3.23

# homeassistant.components.homematicip_cloud
homematicip==1.1.7
@@ -1382,7 +1382,7 @@ mbddns==0.1.2

# homeassistant.components.mcp
# homeassistant.components.mcp_server
mcp==1.1.2
mcp==1.5.0

# homeassistant.components.minecraft_server
mcstatus==11.1.1
@@ -1895,7 +1895,7 @@ pydanfossair==0.1.0
pydeako==0.6.0

# homeassistant.components.deconz
pydeconz==118
pydeconz==120

# homeassistant.components.delijn
pydelijn==1.1.0
@@ -2425,7 +2425,7 @@ python-juicenet==1.1.0
python-kasa[speedups]==0.10.2

# homeassistant.components.linkplay
python-linkplay==0.2.0
python-linkplay==0.2.1

# homeassistant.components.lirc
# python-lirc==1.2.3
requirements_test_all.txt (generated; 14 changed lines)
@@ -189,7 +189,7 @@ aioaseko==1.0.0
aioasuswrt==1.4.0

# homeassistant.components.husqvarna_automower
aioautomower==2025.3.1
aioautomower==2025.3.2

# homeassistant.components.azure_devops
aioazuredevops==2.2.1
@@ -640,7 +640,7 @@ datadog==0.15.0
datapoint==0.9.9

# homeassistant.components.bluetooth
dbus-fast==2.41.1
dbus-fast==2.43.0

# homeassistant.components.debugpy
debugpy==1.8.13
@@ -830,7 +830,7 @@ gTTS==2.5.3
gardena-bluetooth==1.6.0

# homeassistant.components.google_assistant_sdk
gassist-text==0.0.11
gassist-text==0.0.12

# homeassistant.components.google
gcal-sync==7.0.0
@@ -984,7 +984,7 @@ holidays==0.68
home-assistant-frontend==20250306.0

# homeassistant.components.conversation
home-assistant-intents==2025.3.5
home-assistant-intents==2025.3.23

# homeassistant.components.homematicip_cloud
homematicip==1.1.7
@@ -1157,7 +1157,7 @@ mbddns==0.1.2

# homeassistant.components.mcp
# homeassistant.components.mcp_server
mcp==1.1.2
mcp==1.5.0

# homeassistant.components.minecraft_server
mcstatus==11.1.1
@@ -1548,7 +1548,7 @@ pydaikin==2.14.1
pydeako==0.6.0

# homeassistant.components.deconz
pydeconz==118
pydeconz==120

# homeassistant.components.dexcom
pydexcom==0.2.3
@@ -1961,7 +1961,7 @@ python-juicenet==1.1.0
python-kasa[speedups]==0.10.2

# homeassistant.components.linkplay
python-linkplay==0.2.0
python-linkplay==0.2.1

# homeassistant.components.matter
python-matter-server==7.0.0
@@ -173,10 +173,11 @@ IGNORE_VIOLATIONS = {
"logbook",
# Temporary needed for migration until 2024.10
("conversation", "assist_pipeline"),
# The onboarding integration provides a limited backup API used during
# onboarding. The onboarding integration waits for the backup manager
# to be ready before calling any backup functionality.
# The onboarding integration provides limited backup and cloud APIs for use
# during onboarding. The onboarding integration waits for the backup manager
# and cloud to be ready before calling any backup or cloud functionality.
("onboarding", "backup"),
("onboarding", "cloud"),
}
script/hassfest/docker/Dockerfile (generated; 2 changed lines)
@@ -25,7 +25,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/bin/uv \
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
-r /usr/src/homeassistant/requirements.txt \
stdlib-list==0.10.0 pipdeptree==2.25.1 tqdm==4.67.1 ruff==0.11.0 \
PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.5 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.23 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
@@ -1193,7 +1193,7 @@ async def test_subentry_reconfigure_flow(hass: HomeAssistant, client) -> None:
async def async_step_reconfigure(self, user_input=None):
if user_input is not None:
return self.async_update_and_abort(
self._get_reconfigure_entry(),
self._get_entry(),
self._get_reconfigure_subentry(),
title="Test Entry",
data={"test": "blah"},
@@ -32,6 +32,7 @@
'it',
'ka',
'ko',
'kw',
'lb',
'lt',
'lv',
@@ -4,6 +4,9 @@ from unittest.mock import Mock, patch

import pytest

from homeassistant.components.google_generative_ai_conversation.conversation import (
CONF_USE_GOOGLE_SEARCH_TOOL,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
@@ -41,6 +44,23 @@ async def mock_config_entry_with_assist(
return mock_config_entry


@pytest.fixture
async def mock_config_entry_with_google_search(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
"""Mock a config entry with assist."""
with patch("google.genai.models.AsyncModels.get"):
hass.config_entries.async_update_entry(
mock_config_entry,
options={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_USE_GOOGLE_SEARCH_TOOL: True,
},
)
await hass.async_block_till_done()
return mock_config_entry


@pytest.fixture
async def mock_init_component(
hass: HomeAssistant, mock_config_entry: ConfigEntry
@@ -61,3 +61,34 @@
),
])
# ---
# name: test_use_google_search
list([
tuple(
'',
tuple(
),
dict({
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.ARRAY: 'ARRAY'>, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, 
min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'history': list([
|
||||
]),
|
||||
'model': 'models/gemini-2.0-flash',
|
||||
}),
|
||||
),
|
||||
tuple(
|
||||
'().send_message',
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'message': 'Please call the test function',
|
||||
}),
|
||||
),
|
||||
tuple(
|
||||
'().send_message',
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None),
|
||||
}),
|
||||
),
|
||||
])
|
||||
# ---
|
||||
|
@ -21,12 +21,14 @@ from homeassistant.components.google_generative_ai_conversation.const import (
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
|
||||
)
|
||||
from homeassistant.const import CONF_LLM_HASS_API
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -143,6 +145,7 @@ async def test_form(hass: HomeAssistant) -> None:
|
||||
CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
|
||||
},
|
||||
),
|
||||
(
|
||||
|
@ -10,7 +10,7 @@ from syrupy.assertion import SnapshotAssertion
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.components.conversation import trace
|
||||
from homeassistant.components.conversation import UserContent, async_get_chat_log, trace
|
||||
from homeassistant.components.google_generative_ai_conversation.conversation import (
|
||||
_escape_decode,
|
||||
_format_schema,
|
||||
@ -18,7 +18,7 @@ from homeassistant.components.google_generative_ai_conversation.conversation imp
|
||||
from homeassistant.const import CONF_LLM_HASS_API
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import intent, llm
|
||||
from homeassistant.helpers import chat_session, intent, llm
|
||||
|
||||
from . import CLIENT_ERROR_500
|
||||
|
||||
@ -176,6 +176,72 @@ async def test_function_call(
|
||||
}
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools"
|
||||
)
|
||||
@pytest.mark.usefixtures("mock_init_component")
|
||||
@pytest.mark.usefixtures("mock_ulid_tools")
|
||||
async def test_use_google_search(
|
||||
mock_get_tools,
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry_with_google_search: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test function calling."""
|
||||
agent_id = "conversation.google_generative_ai_conversation"
|
||||
context = Context()
|
||||
|
||||
mock_tool = AsyncMock()
|
||||
mock_tool.name = "test_tool"
|
||||
mock_tool.description = "Test function"
|
||||
mock_tool.parameters = vol.Schema(
|
||||
{
|
||||
vol.Optional("param1", description="Test parameters"): [
|
||||
vol.All(str, vol.Lower)
|
||||
],
|
||||
vol.Optional("param2"): vol.Any(float, int),
|
||||
vol.Optional("param3"): dict,
|
||||
}
|
||||
)
|
||||
|
||||
mock_get_tools.return_value = [mock_tool]
|
||||
|
||||
with patch("google.genai.chats.AsyncChats.create") as mock_create:
|
||||
mock_chat = AsyncMock()
|
||||
mock_create.return_value.send_message = mock_chat
|
||||
chat_response = Mock(prompt_feedback=None)
|
||||
mock_chat.return_value = chat_response
|
||||
mock_part = Mock()
|
||||
mock_part.text = ""
|
||||
mock_part.function_call = FunctionCall(
|
||||
name="test_tool",
|
||||
args={
|
||||
"param1": ["test_value", "param1\\'s value"],
|
||||
"param2": 2.7,
|
||||
},
|
||||
)
|
||||
|
||||
def tool_call(
|
||||
hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext
|
||||
) -> dict[str, Any]:
|
||||
mock_part.function_call = None
|
||||
mock_part.text = "Hi there!"
|
||||
return {"result": "Test response"}
|
||||
|
||||
mock_tool.async_call.side_effect = tool_call
|
||||
chat_response.candidates = [Mock(content=Mock(parts=[mock_part]))]
|
||||
await conversation.async_converse(
|
||||
hass,
|
||||
"Please call the test function",
|
||||
None,
|
||||
context,
|
||||
agent_id=agent_id,
|
||||
device_id="test_device",
|
||||
)
|
||||
|
||||
assert [tuple(mock_call) for mock_call in mock_create.mock_calls] == snapshot
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools"
|
||||
)
|
||||
@ -627,3 +693,37 @@ async def test_escape_decode() -> None:
|
||||
async def test_format_schema(openapi, genai_schema) -> None:
|
||||
"""Test _format_schema."""
|
||||
assert _format_schema(openapi) == genai_schema
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_init_component")
|
||||
async def test_empty_content_in_chat_history(
|
||||
hass: HomeAssistant, mock_config_entry: MockConfigEntry
|
||||
) -> None:
|
||||
"""Tests that in case of an empty entry in the chat history the google API will receive an injected space sign instead."""
|
||||
with (
|
||||
patch("google.genai.chats.AsyncChats.create") as mock_create,
|
||||
chat_session.async_get_chat_session(hass) as session,
|
||||
async_get_chat_log(hass, session) as chat_log,
|
||||
):
|
||||
mock_chat = AsyncMock()
|
||||
mock_create.return_value.send_message = mock_chat
|
||||
|
||||
# Chat preparation with two inputs, one being an empty string
|
||||
first_input = "First request"
|
||||
second_input = ""
|
||||
chat_log.async_add_user_content(UserContent(first_input))
|
||||
chat_log.async_add_user_content(UserContent(second_input))
|
||||
|
||||
await conversation.async_converse(
|
||||
hass,
|
||||
"Second request",
|
||||
session.conversation_id,
|
||||
Context(),
|
||||
agent_id="conversation.google_generative_ai_conversation",
|
||||
)
|
||||
|
||||
_, kwargs = mock_create.call_args
|
||||
actual_history = kwargs.get("history")
|
||||
|
||||
assert actual_history[0].parts[0].text == first_input
|
||||
assert actual_history[1].parts[0].text == " "
|
||||
|
@ -79,7 +79,7 @@ async def test_number_workarea_commands(
|
||||
freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456)
|
||||
mocked_method.assert_called_once_with(TEST_MOWER_ID, 123456, cutting_height=75)
|
||||
state = hass.states.get(entity_id)
|
||||
assert state.state is not None
|
||||
assert state.state == "75"
|
||||
|
@ -96,6 +96,15 @@ async def test_options_flow(hass: HomeAssistant) -> None:
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "options_1"
|
||||
|
||||
section_marker, section_schema = list(result["data_schema"].schema.items())[0]
|
||||
assert section_marker == "section_1"
|
||||
section_schema_markers = list(section_schema.schema.schema)
|
||||
assert len(section_schema_markers) == 2
|
||||
assert section_schema_markers[0] == "bool"
|
||||
assert section_schema_markers[0].description is None
|
||||
assert section_schema_markers[1] == "int"
|
||||
assert section_schema_markers[1].description == {"suggested_value": 10}
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={"section_1": {"bool": True, "int": 15}},
|
||||
|
@ -2,9 +2,20 @@
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass
|
||||
from aiohttp.client_exceptions import ClientError
|
||||
from nettigo_air_monitor import ApiError, AuthFailedError
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.button import (
|
||||
DOMAIN as BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
ButtonDeviceClass,
|
||||
)
|
||||
from homeassistant.components.nam import DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
|
||||
from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
@ -38,7 +49,7 @@ async def test_button_press(hass: HomeAssistant) -> None:
|
||||
):
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
"press",
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"},
|
||||
blocking=True,
|
||||
)
|
||||
@ -49,3 +60,55 @@ async def test_button_press(hass: HomeAssistant) -> None:
|
||||
state = hass.states.get("button.nettigo_air_monitor_restart")
|
||||
assert state
|
||||
assert state.state == now.isoformat()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(("exc"), [ApiError("API Error"), ClientError])
|
||||
async def test_button_press_exc(hass: HomeAssistant, exc: Exception) -> None:
|
||||
"""Test button press when exception occurs."""
|
||||
await init_integration(hass)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.nam.NettigoAirMonitor.async_restart",
|
||||
side_effect=exc,
|
||||
),
|
||||
pytest.raises(
|
||||
HomeAssistantError,
|
||||
match="An error occurred while calling action for button.nettigo_air_monitor_restart",
|
||||
),
|
||||
):
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
|
||||
async def test_button_press_auth_error(hass: HomeAssistant) -> None:
|
||||
"""Test button press when auth error occurs."""
|
||||
entry = await init_integration(hass)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.nam.NettigoAirMonitor.async_restart",
|
||||
side_effect=AuthFailedError("auth error"),
|
||||
):
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
|
||||
flow = flows[0]
|
||||
assert flow.get("step_id") == "reauth_confirm"
|
||||
assert flow.get("handler") == DOMAIN
|
||||
|
||||
assert "context" in flow
|
||||
assert flow["context"].get("source") == SOURCE_REAUTH
|
||||
assert flow["context"].get("entry_id") == entry.entry_id
|
||||
|
@ -25,11 +25,7 @@ from .common import (
|
||||
simulate_webhook,
|
||||
)
|
||||
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
async_fire_time_changed,
|
||||
async_get_persistent_notifications,
|
||||
)
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.cloud import mock_cloud
|
||||
from tests.typing import WebSocketGenerator
|
||||
|
||||
@ -423,9 +419,8 @@ async def test_setup_component_invalid_token_scope(hass: HomeAssistant) -> None:
|
||||
assert config_entry.state is ConfigEntryState.SETUP_ERROR
|
||||
assert hass.config_entries.async_entries(DOMAIN)
|
||||
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
|
||||
assert len(notifications) > 0
|
||||
# Test a reauth flow is initiated
|
||||
assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1
|
||||
|
||||
for config_entry in hass.config_entries.async_entries("netatmo"):
|
||||
await hass.config_entries.async_remove(config_entry.entry_id)
|
||||
@ -476,8 +471,9 @@ async def test_setup_component_invalid_token(
|
||||
|
||||
assert config_entry.state is ConfigEntryState.SETUP_ERROR
|
||||
assert hass.config_entries.async_entries(DOMAIN)
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
assert len(notifications) > 0
|
||||
|
||||
# Test a reauth flow is initiated
|
||||
assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1
|
||||
|
||||
for entry in hass.config_entries.async_entries("netatmo"):
|
||||
await hass.config_entries.async_remove(entry.entry_id)
|
||||
|
@ -6,12 +6,16 @@ from http import HTTPStatus
|
||||
from io import StringIO
|
||||
import os
|
||||
from typing import Any
|
||||
from unittest.mock import ANY, AsyncMock, Mock, patch
|
||||
from unittest.mock import ANY, DEFAULT, AsyncMock, MagicMock, Mock, patch
|
||||
|
||||
from hass_nabucasa.auth import CognitoAuth
|
||||
from hass_nabucasa.const import STATE_CONNECTED
|
||||
from hass_nabucasa.iot import CloudIoT
|
||||
import pytest
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.components import backup, onboarding
|
||||
from homeassistant.components.cloud import DOMAIN as CLOUD_DOMAIN, CloudClient
|
||||
from homeassistant.components.onboarding import const, views
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@ -1067,3 +1071,139 @@ async def test_onboarding_backup_upload(
|
||||
assert resp.status == 201
|
||||
assert await resp.json() == {"backup_id": "abc123"}
|
||||
mock_receive.assert_called_once_with(agent_ids=["backup.local"], contents=ANY)
|
||||
|
||||
|
||||
@pytest.fixture(name="cloud")
|
||||
async def cloud_fixture() -> AsyncGenerator[MagicMock]:
|
||||
"""Mock the cloud object.
|
||||
|
||||
See the real hass_nabucasa.Cloud class for how to configure the mock.
|
||||
"""
|
||||
with patch(
|
||||
"homeassistant.components.cloud.Cloud", autospec=True
|
||||
) as mock_cloud_class:
|
||||
mock_cloud = mock_cloud_class.return_value
|
||||
|
||||
mock_cloud.auth = MagicMock(spec=CognitoAuth)
|
||||
mock_cloud.iot = MagicMock(
|
||||
spec=CloudIoT, last_disconnect_reason=None, state=STATE_CONNECTED
|
||||
)
|
||||
|
||||
def set_up_mock_cloud(
|
||||
cloud_client: CloudClient, mode: str, **kwargs: Any
|
||||
) -> DEFAULT:
|
||||
"""Set up mock cloud with a mock constructor."""
|
||||
|
||||
# Attributes set in the constructor with parameters.
|
||||
mock_cloud.client = cloud_client
|
||||
|
||||
return DEFAULT
|
||||
|
||||
mock_cloud_class.side_effect = set_up_mock_cloud
|
||||
|
||||
# Attributes that we mock with default values.
|
||||
mock_cloud.id_token = None
|
||||
mock_cloud.is_logged_in = False
|
||||
|
||||
yield mock_cloud
|
||||
|
||||
|
||||
@pytest.fixture(name="setup_cloud")
|
||||
async def setup_cloud_fixture(hass: HomeAssistant, cloud: MagicMock) -> None:
|
||||
"""Fixture that sets up cloud."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, CLOUD_DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_cloud")
|
||||
async def test_onboarding_cloud_forgot_password(
|
||||
hass: HomeAssistant,
|
||||
hass_storage: dict[str, Any],
|
||||
hass_client: ClientSessionGenerator,
|
||||
cloud: MagicMock,
|
||||
) -> None:
|
||||
"""Test cloud forgot password."""
|
||||
mock_storage(hass_storage, {"done": []})
|
||||
|
||||
assert await async_setup_component(hass, "onboarding", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
|
||||
mock_cognito = cloud.auth
|
||||
|
||||
req = await client.post(
|
||||
"/api/onboarding/cloud/forgot_password", json={"email": "hello@bla.com"}
|
||||
)
|
||||
|
||||
assert req.status == HTTPStatus.OK
|
||||
assert mock_cognito.async_forgot_password.call_count == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_cloud")
|
||||
async def test_onboarding_cloud_login(
|
||||
hass: HomeAssistant,
|
||||
hass_storage: dict[str, Any],
|
||||
hass_client: ClientSessionGenerator,
|
||||
cloud: MagicMock,
|
||||
) -> None:
|
||||
"""Test logging out from cloud."""
|
||||
mock_storage(hass_storage, {"done": []})
|
||||
|
||||
assert await async_setup_component(hass, "onboarding", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
req = await client.post(
|
||||
"/api/onboarding/cloud/login",
|
||||
json={"email": "my_username", "password": "my_password"},
|
||||
)
|
||||
|
||||
assert req.status == HTTPStatus.OK
|
||||
data = await req.json()
|
||||
assert data == {"cloud_pipeline": None, "success": True}
|
||||
assert cloud.login.call_count == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_cloud")
|
||||
async def test_onboarding_cloud_logout(
|
||||
hass: HomeAssistant,
|
||||
hass_storage: dict[str, Any],
|
||||
hass_client: ClientSessionGenerator,
|
||||
cloud: MagicMock,
|
||||
) -> None:
|
||||
"""Test logging out from cloud."""
|
||||
mock_storage(hass_storage, {"done": []})
|
||||
|
||||
assert await async_setup_component(hass, "onboarding", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
req = await client.post("/api/onboarding/cloud/logout")
|
||||
|
||||
assert req.status == HTTPStatus.OK
|
||||
data = await req.json()
|
||||
assert data == {"message": "ok"}
|
||||
assert cloud.logout.call_count == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_cloud")
|
||||
async def test_onboarding_cloud_status(
|
||||
hass: HomeAssistant,
|
||||
hass_storage: dict[str, Any],
|
||||
hass_client: ClientSessionGenerator,
|
||||
cloud: MagicMock,
|
||||
) -> None:
|
||||
"""Test logging out from cloud."""
|
||||
mock_storage(hass_storage, {"done": []})
|
||||
|
||||
assert await async_setup_component(hass, "onboarding", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
req = await client.get("/api/onboarding/cloud/status")
|
||||
|
||||
assert req.status == HTTPStatus.OK
|
||||
data = await req.json()
|
||||
assert data == {"logged_in": False}
|
||||
|
@ -569,6 +569,54 @@
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'binary_sensor.refrigerator_cooler_door',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.DOOR: 'door'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Cooler door',
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'cooler_door',
|
||||
'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_cooler_contactSensor_contact',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'door',
|
||||
'friendly_name': 'Refrigerator Cooler door',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.refrigerator_cooler_door',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_door-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@ -617,6 +665,54 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'binary_sensor.refrigerator_freezer_door',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.DOOR: 'door'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Freezer door',
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'freezer_door',
|
||||
'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_freezer_contactSensor_contact',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'door',
|
||||
'friendly_name': 'Refrigerator Freezer door',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.refrigerator_freezer_door',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_child_lock-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
@ -114,34 +114,34 @@
|
||||
# name: test_sensor[System Monitor Last boot - state]
|
||||
'2024-02-24T15:00:00+00:00'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (15m) - attributes]
|
||||
# name: test_sensor[System Monitor Load (15 min) - attributes]
|
||||
ReadOnlyDict({
|
||||
'friendly_name': 'System Monitor Load (15m)',
|
||||
'friendly_name': 'System Monitor Load (15 min)',
|
||||
'icon': 'mdi:cpu-64-bit',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (15m) - state]
|
||||
# name: test_sensor[System Monitor Load (15 min) - state]
|
||||
'3'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (1m) - attributes]
|
||||
# name: test_sensor[System Monitor Load (1 min) - attributes]
|
||||
ReadOnlyDict({
|
||||
'friendly_name': 'System Monitor Load (1m)',
|
||||
'friendly_name': 'System Monitor Load (1 min)',
|
||||
'icon': 'mdi:cpu-64-bit',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (1m) - state]
|
||||
# name: test_sensor[System Monitor Load (1 min) - state]
|
||||
'1'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (5m) - attributes]
|
||||
# name: test_sensor[System Monitor Load (5 min) - attributes]
|
||||
ReadOnlyDict({
|
||||
'friendly_name': 'System Monitor Load (5m)',
|
||||
'friendly_name': 'System Monitor Load (5 min)',
|
||||
'icon': 'mdi:cpu-64-bit',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Load (5m) - state]
|
||||
# name: test_sensor[System Monitor Load (5 min) - state]
|
||||
'2'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Memory free - attributes]
|
||||
|
@ -1421,83 +1421,6 @@ async def test_discovery_notification(
|
||||
assert "config_entry_discovery" not in notifications
|
||||
|
||||
|
||||
async def test_reauth_notification(hass: HomeAssistant) -> None:
|
||||
"""Test that we create/dismiss a notification when source is reauth."""
|
||||
mock_integration(hass, MockModule("test"))
|
||||
mock_platform(hass, "test.config_flow", None)
|
||||
|
||||
entry = MockConfigEntry(title="test_title", domain="test")
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
class TestFlow(config_entries.ConfigFlow):
|
||||
"""Test flow."""
|
||||
|
||||
VERSION = 5
|
||||
|
||||
async def async_step_user(self, user_input):
|
||||
"""Test user step."""
|
||||
return self.async_show_form(step_id="user_confirm")
|
||||
|
||||
async def async_step_user_confirm(self, user_input):
|
||||
"""Test user confirm step."""
|
||||
return self.async_show_form(step_id="user_confirm")
|
||||
|
||||
async def async_step_reauth(self, user_input):
|
||||
"""Test reauth step."""
|
||||
return self.async_show_form(step_id="reauth_confirm")
|
||||
|
||||
async def async_step_reauth_confirm(self, user_input):
|
||||
"""Test reauth confirm step."""
|
||||
return self.async_abort(reason="test")
|
||||
|
||||
with mock_config_flow("test", TestFlow):
|
||||
# Start user flow to assert that reconfigure notification doesn't fire
|
||||
await hass.config_entries.flow.async_init(
|
||||
"test", context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
assert "config_entry_reconfigure" not in notifications
|
||||
|
||||
# Start first reauth flow to assert that reconfigure notification fires
|
||||
flow1 = await hass.config_entries.flow.async_init(
|
||||
"test",
|
||||
context={
|
||||
"source": config_entries.SOURCE_REAUTH,
|
||||
"entry_id": entry.entry_id,
|
||||
},
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
assert "config_entry_reconfigure" in notifications
|
||||
|
||||
# Start a second reauth flow so we can finish the first and assert that
|
||||
# the reconfigure notification persists until the second one is complete
|
||||
flow2 = await hass.config_entries.flow.async_init(
|
||||
"test",
|
||||
context={
|
||||
"source": config_entries.SOURCE_REAUTH,
|
||||
"entry_id": entry.entry_id,
|
||||
},
|
||||
)
|
||||
|
||||
flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {})
|
||||
assert flow1["type"] == data_entry_flow.FlowResultType.ABORT
|
||||
|
||||
await hass.async_block_till_done()
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
assert "config_entry_reconfigure" in notifications
|
||||
|
||||
flow2 = await hass.config_entries.flow.async_configure(flow2["flow_id"], {})
|
||||
assert flow2["type"] == data_entry_flow.FlowResultType.ABORT
|
||||
|
||||
await hass.async_block_till_done()
|
||||
notifications = async_get_persistent_notifications(hass)
|
||||
assert "config_entry_reconfigure" not in notifications
|
||||
|
||||
|
||||
async def test_reauth_issue(
|
||||
hass: HomeAssistant,
|
||||
manager: config_entries.ConfigEntries,
|
||||
@ -6566,7 +6489,7 @@ async def test_update_subentry_and_abort(
|
||||
class SubentryFlowHandler(config_entries.ConfigSubentryFlow):
|
||||
async def async_step_reconfigure(self, user_input=None):
|
||||
return self.async_update_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
self._get_entry(),
|
||||
self._get_reconfigure_subentry(),
|
||||
**kwargs,
|
||||
)
|
||||
@ -8158,10 +8081,10 @@ async def test_get_reconfigure_entry(
|
||||
assert result["reason"] == "Source is user, expected reconfigure: -"
|
||||
|
||||
|
||||
async def test_subentry_get_reconfigure_entry(
|
||||
async def test_subentry_get_entry(
|
||||
hass: HomeAssistant, manager: config_entries.ConfigEntries
|
||||
) -> None:
|
||||
"""Test subentry _get_reconfigure_entry and _get_reconfigure_subentry behavior."""
|
||||
"""Test subentry _get_entry and _get_reconfigure_subentry behavior."""
|
||||
subentry_id = "mock_subentry_id"
|
||||
entry = MockConfigEntry(
|
||||
data={},
|
||||
@ -8198,13 +8121,13 @@ async def test_subentry_get_reconfigure_entry(
|
||||
async def _async_step_confirm(self):
|
||||
"""Confirm input."""
|
||||
try:
|
||||
entry = self._get_reconfigure_entry()
|
||||
entry = self._get_entry()
|
||||
except ValueError as err:
|
||||
reason = str(err)
|
||||
else:
|
||||
reason = f"Found entry {entry.title}"
|
||||
try:
|
||||
entry_id = self._reconfigure_entry_id
|
||||
entry_id = self._entry_id
|
||||
except ValueError:
|
||||
reason = f"{reason}: -"
|
||||
else:
|
||||
@ -8233,7 +8156,7 @@ async def test_subentry_get_reconfigure_entry(
|
||||
) -> dict[str, type[config_entries.ConfigSubentryFlow]]:
|
||||
return {"test": TestFlow.SubentryFlowHandler}
|
||||
|
||||
# A reconfigure flow finds the config entry
|
||||
# A reconfigure flow finds the config entry and subentry
|
||||
with mock_config_flow("test", TestFlow):
|
||||
result = await entry.start_subentry_reconfigure_flow(hass, "test", subentry_id)
|
||||
assert (
|
||||
@ -8255,14 +8178,14 @@ async def test_subentry_get_reconfigure_entry(
|
||||
== "Found entry entry_title: mock_entry_id/Subentry not found: 01JRemoved"
|
||||
)
|
||||
|
||||
# A user flow does not have access to the config entry or subentry
|
||||
# A user flow finds the config entry but not the subentry
|
||||
with mock_config_flow("test", TestFlow):
|
||||
result = await manager.subentries.async_init(
|
||||
(entry.entry_id, "test"), context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert (
|
||||
result["reason"]
|
||||
== "Source is user, expected reconfigure: -/Source is user, expected reconfigure: -"
|
||||
== "Found entry entry_title: mock_entry_id/Source is user, expected reconfigure: -"
|
||||
)
|
||||