Mirror of https://github.com/home-assistant/core.git
Synced 2025-10-14 22:29:36 +00:00

Compare commits: 2 commits (cursor/add ... refactor-h)

Author | SHA1 | Date
---|---|---
 | 2dd40ed15f |
 | b6d8b56b5b |

4  .github/workflows/ci.yaml  vendored
@@ -37,7 +37,7 @@ on:
        type: boolean

env:
  CACHE_VERSION: 9
  CACHE_VERSION: 8
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.11"
@@ -525,7 +525,7 @@ jobs:
          . venv/bin/activate
          python --version
          pip install "$(grep '^uv' < requirements.txt)"
          uv pip install -U "pip>=25.2"
          uv pip install -U "pip>=21.3.1" setuptools wheel
          uv pip install -r requirements.txt
          python -m script.gen_requirements_all ci
          uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
12  CODEOWNERS  generated
@@ -762,8 +762,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @jukrebs
/tests/components/iometer/ @jukrebs
/homeassistant/components/iometer/ @MaestroOnICe
/tests/components/iometer/ @MaestroOnICe
/homeassistant/components/ios/ @robbiet480
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -1065,8 +1065,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/nilu/ @hfurubotten
/homeassistant/components/nina/ @DeerMaximum
/tests/components/nina/ @DeerMaximum
/homeassistant/components/nintendo_parental_controls/ @pantherale0
/tests/components/nintendo_parental_controls/ @pantherale0
/homeassistant/components/nintendo_parental/ @pantherale0
/tests/components/nintendo_parental/ @pantherale0
/homeassistant/components/nissan_leaf/ @filcole
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1479,8 +1479,8 @@ build.json @home-assistant/supervisor
/tests/components/snoo/ @Lash-L
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck @bdraco @tronikos
/tests/components/solaredge/ @frenck @bdraco @tronikos
/homeassistant/components/solaredge/ @frenck @bdraco
/tests/components/solaredge/ @frenck @bdraco
/homeassistant/components/solaredge_local/ @drobtravels @scheric
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
/tests/components/solarlog/ @Ernst79 @dontinelli
@@ -36,8 +36,7 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv

USER vscode

ENV UV_PYTHON=3.13.2
RUN uv python install
RUN uv python install 3.13.2

ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN uv venv $VIRTUAL_ENV
@@ -71,14 +71,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
            }
        )

        return self.async_show_form(
            step_id="user",
            data_schema=schema,
            errors=errors,
            description_placeholders={
                "api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario"
            },
        )
        return self.async_show_form(step_id="user", data_schema=schema, errors=errors)

    @staticmethod
    @callback
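Note: the AEMET hunks above move a hardcoded URL out of the translated description and into a `description_placeholders` argument that fills the `{api_key_url}` token in strings.json. A minimal, hypothetical sketch of that pattern follows; the domain, schema, and constant are illustrative assumptions, not taken from this diff.

```python
# Hypothetical config flow step showing the description_placeholders pattern.
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY

API_KEY_URL = "https://opendata.aemet.es/centrodedescargas/altaUsuario"


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Example flow; the strings.json description contains {api_key_url}."""

    async def async_step_user(self, user_input=None) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_create_entry(title="Example", data=user_input)
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            # The key must match the placeholder used in the translated string.
            description_placeholders={"api_key_url": API_KEY_URL},
        )
```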
@@ -14,7 +14,7 @@
          "longitude": "[%key:common::config_flow::data::longitude%]",
          "name": "Name of the integration"
        },
        "description": "To generate API key go to {api_key_url}"
        "description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario"
      }
    }
  },
@@ -30,7 +30,6 @@ generate_data:
      media:
        accept:
          - "*"
        multiple: true
generate_image:
  fields:
    task_name:
@@ -58,4 +57,3 @@ generate_image:
      media:
        accept:
          - "*"
        multiple: true
@@ -18,10 +18,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS

DESCRIPTION_PLACEHOLDERS = {
    "developer_registration_url": "https://developer.airly.eu/register",
}


class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
    """Config flow for Airly."""
@@ -89,7 +85,6 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
                }
            ),
            errors=errors,
            description_placeholders=DESCRIPTION_PLACEHOLDERS,
        )
@@ -2,7 +2,7 @@
  "config": {
    "step": {
      "user": {
        "description": "To generate API key go to {developer_registration_url}",
        "description": "To generate API key go to https://developer.airly.eu/register",
        "data": {
          "name": "[%key:common::config_flow::data::name%]",
          "api_key": "[%key:common::config_flow::data::api_key%]",
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
  "requirements": ["aioamazondevices==6.4.3"]
  "requirements": ["aioamazondevices==6.4.0"]
}
@@ -4,15 +4,12 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components.zone import ENTITY_ID_HOME
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
@@ -21,13 +18,7 @@ from homeassistant.config_entries import (
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_API_KEY,
|
||||
CONF_LLM_HASS_API,
|
||||
CONF_NAME,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.selector import (
|
||||
@@ -46,23 +37,12 @@ from .const import (
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
RECOMMENDED_WEB_SEARCH,
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -188,14 +168,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
|
||||
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
|
||||
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
|
||||
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
|
||||
model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
|
||||
errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
|
||||
elif user_input.get(
|
||||
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
|
||||
):
|
||||
user_input.update(await self._get_location_data())
|
||||
|
||||
if not errors:
|
||||
if self._is_new:
|
||||
@@ -243,68 +215,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
errors=errors or None,
|
||||
)
|
||||
|
||||
async def _get_location_data(self) -> dict[str, str]:
|
||||
"""Get approximate location data of the user."""
|
||||
location_data: dict[str, str] = {}
|
||||
zone_home = self.hass.states.get(ENTITY_ID_HOME)
|
||||
if zone_home is not None:
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
)
|
||||
location_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
description="Free text input for the city, e.g. `San Francisco`",
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
description="Free text input for the region, e.g. `California`",
|
||||
): str,
|
||||
}
|
||||
)
|
||||
response = await client.messages.create(
|
||||
model=RECOMMENDED_CHAT_MODEL,
|
||||
messages=[
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Where are the following coordinates located: "
|
||||
f"({zone_home.attributes[ATTR_LATITUDE]},"
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
|
||||
"only with a JSON object using the following schema:\n"
|
||||
f"{convert(location_schema)}",
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
],
|
||||
max_tokens=RECOMMENDED_MAX_TOKENS,
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
json.loads(
|
||||
"{"
|
||||
+ "".join(
|
||||
block.text
|
||||
for block in response.content
|
||||
if isinstance(block, anthropic.types.TextBlock)
|
||||
)
|
||||
)
|
||||
or {}
|
||||
)
|
||||
|
||||
if self.hass.config.country:
|
||||
location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country
|
||||
location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone
|
||||
|
||||
_LOGGER.debug("Location data: %s", location_data)
|
||||
|
||||
return location_data
|
||||
|
||||
async_step_user = async_step_set_options
|
||||
async_step_reconfigure = async_step_set_options
|
||||
|
||||
@@ -363,18 +273,6 @@ def anthropic_config_option_schema(
|
||||
CONF_THINKING_BUDGET,
|
||||
default=RECOMMENDED_THINKING_BUDGET,
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH,
|
||||
default=RECOMMENDED_WEB_SEARCH,
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
return schema
|
||||
|
@@ -18,26 +18,9 @@ RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
CONF_WEB_SEARCH = "web_search"
RECOMMENDED_WEB_SEARCH = False
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
CONF_WEB_SEARCH_CITY = "city"
CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"

NON_THINKING_MODELS = [
    "claude-3-5",  # Both sonnet and haiku
    "claude-3-opus",
    "claude-3-haiku",
]

WEB_SEARCH_UNSUPPORTED_MODELS = [
    "claude-3-haiku",
    "claude-3-opus",
    "claude-3-5-sonnet-20240620",
    "claude-3-5-sonnet-20241022",
]
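The constants above gate Anthropic's extended-thinking feature: a thinking budget below MIN_THINKING_BUDGET, or a model whose name starts with an entry in NON_THINKING_MODELS, means thinking stays disabled. A hedged sketch of how such constants are typically consumed when building the message request; the helper below is illustrative, not the integration's own code.

```python
# Illustrative helper: decide the "thinking" argument for an Anthropic
# messages.create() call from the configured model and budget.
MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = ["claude-3-5", "claude-3-opus", "claude-3-haiku"]


def thinking_param(model: str, thinking_budget: int) -> dict:
    """Return the thinking configuration for the given model and budget."""
    if model.startswith(tuple(NON_THINKING_MODELS)) or thinking_budget < MIN_THINKING_BUDGET:
        return {"type": "disabled"}
    return {"type": "enabled", "budget_tokens": thinking_budget}
```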
@@ -1,17 +1,12 @@
|
||||
"""Base entity for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
from dataclasses import dataclass, field
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic.types import (
|
||||
CitationsDelta,
|
||||
CitationsWebSearchResultLocation,
|
||||
CitationWebSearchResultLocationParam,
|
||||
ContentBlockParam,
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
@@ -21,16 +16,11 @@ from anthropic.types import (
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
ServerToolUseBlock,
|
||||
ServerToolUseBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextCitation,
|
||||
TextCitationParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
@@ -39,15 +29,9 @@ from anthropic.types import (
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUnionParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
WebSearchTool20250305Param,
|
||||
WebSearchToolRequestErrorParam,
|
||||
WebSearchToolResultBlock,
|
||||
WebSearchToolResultBlockParam,
|
||||
WebSearchToolResultError,
|
||||
)
|
||||
from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||
from voluptuous_openapi import convert
|
||||
@@ -64,13 +48,6 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
@@ -96,69 +73,6 @@ def _format_tool(
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class CitationDetails:
|
||||
"""Citation details for a content part."""
|
||||
|
||||
index: int = 0
|
||||
"""Start position of the text."""
|
||||
|
||||
length: int = 0
|
||||
"""Length of the relevant data."""
|
||||
|
||||
citations: list[TextCitationParam] = field(default_factory=list)
|
||||
"""Citations for the content part."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ContentDetails:
|
||||
"""Native data for AssistantContent."""
|
||||
|
||||
citation_details: list[CitationDetails] = field(default_factory=list)
|
||||
|
||||
def has_content(self) -> bool:
|
||||
"""Check if there is any content."""
|
||||
return any(detail.length > 0 for detail in self.citation_details)
|
||||
|
||||
def has_citations(self) -> bool:
|
||||
"""Check if there are any citations."""
|
||||
return any(detail.citations for detail in self.citation_details)
|
||||
|
||||
def add_citation_detail(self) -> None:
|
||||
"""Add a new citation detail."""
|
||||
if not self.citation_details or self.citation_details[-1].length > 0:
|
||||
self.citation_details.append(
|
||||
CitationDetails(
|
||||
index=self.citation_details[-1].index
|
||||
+ self.citation_details[-1].length
|
||||
if self.citation_details
|
||||
else 0
|
||||
)
|
||||
)
|
||||
|
||||
def add_citation(self, citation: TextCitation) -> None:
|
||||
"""Add a citation to the current detail."""
|
||||
if not self.citation_details:
|
||||
self.citation_details.append(CitationDetails())
|
||||
citation_param: TextCitationParam | None = None
|
||||
if isinstance(citation, CitationsWebSearchResultLocation):
|
||||
citation_param = CitationWebSearchResultLocationParam(
|
||||
type="web_search_result_location",
|
||||
title=citation.title,
|
||||
url=citation.url,
|
||||
cited_text=citation.cited_text,
|
||||
encrypted_index=citation.encrypted_index,
|
||||
)
|
||||
if citation_param:
|
||||
self.citation_details[-1].citations.append(citation_param)
|
||||
|
||||
def delete_empty(self) -> None:
|
||||
"""Delete empty citation details."""
|
||||
self.citation_details = [
|
||||
detail for detail in self.citation_details if detail.citations
|
||||
]
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
@@ -167,31 +81,15 @@ def _convert_content(
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
if content.tool_name == "web_search":
|
||||
tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam(
|
||||
type="web_search_tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=content.tool_result["content"]
|
||||
if "content" in content.tool_result
|
||||
else WebSearchToolRequestErrorParam(
|
||||
type="web_search_tool_result_error",
|
||||
error_code=content.tool_result.get("error_code", "unavailable"), # type: ignore[typeddict-item]
|
||||
),
|
||||
)
|
||||
external_tool = True
|
||||
else:
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
external_tool = False
|
||||
if not messages or messages[-1]["role"] != (
|
||||
"assistant" if external_tool else "user"
|
||||
):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="assistant" if external_tool else "user",
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
@@ -253,56 +151,13 @@ def _convert_content(
|
||||
redacted_thinking_block
|
||||
)
|
||||
if content.content:
|
||||
current_index = 0
|
||||
for detail in (
|
||||
content.native.citation_details
|
||||
if isinstance(content.native, ContentDetails)
|
||||
else [CitationDetails(length=len(content.content))]
|
||||
):
|
||||
if detail.index > current_index:
|
||||
# Add text block for any text without citations
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[current_index : detail.index],
|
||||
)
|
||||
)
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[
|
||||
detail.index : detail.index + detail.length
|
||||
],
|
||||
citations=detail.citations,
|
||||
)
|
||||
if detail.citations
|
||||
else TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[
|
||||
detail.index : detail.index + detail.length
|
||||
],
|
||||
)
|
||||
)
|
||||
current_index = detail.index + detail.length
|
||||
if current_index < len(content.content):
|
||||
# Add text block for any remaining text without citations
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[current_index:],
|
||||
)
|
||||
)
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
id=tool_call.id,
|
||||
name="web_search",
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
if tool_call.external and tool_call.tool_name == "web_search"
|
||||
else ToolUseBlockParam(
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
@@ -318,12 +173,10 @@ def _convert_content(
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
async def _transform_stream(
|
||||
chat_log: conversation.ChatLog,
|
||||
stream: AsyncStream[MessageStreamEvent],
|
||||
) -> AsyncGenerator[
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
]:
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
@@ -356,13 +209,11 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
if stream is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
|
||||
current_tool_block: ToolUseBlockParam | None = None
|
||||
current_tool_args: str
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
input_usage: Usage | None = None
|
||||
has_content = False
|
||||
has_native = False
|
||||
first_block: bool
|
||||
|
||||
async for response in stream:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
@@ -371,7 +222,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
input_usage = response.message.usage
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_tool_block = ToolUseBlockParam(
|
||||
@@ -382,37 +232,17 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
|
||||
first_block
|
||||
or (
|
||||
not content_details.has_citations()
|
||||
and response.content_block.citations is None
|
||||
and content_details.has_content()
|
||||
)
|
||||
):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
if has_content:
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
content_details.add_citation_detail()
|
||||
has_content = True
|
||||
if response.content_block.text:
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.content_block.text
|
||||
)
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
if first_block or has_native:
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
if has_native:
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
has_content = False
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
@@ -420,60 +250,15 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
"responses"
|
||||
)
|
||||
if has_native:
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
has_content = False
|
||||
yield {"native": response.content_block}
|
||||
has_native = True
|
||||
elif isinstance(response.content_block, ServerToolUseBlock):
|
||||
current_tool_block = ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, WebSearchToolResultBlock):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {
|
||||
"role": "tool_result",
|
||||
"tool_call_id": response.content_block.tool_use_id,
|
||||
"tool_name": "web_search",
|
||||
"tool_result": {
|
||||
"type": "web_search_tool_result_error",
|
||||
"error_code": response.content_block.content.error_code,
|
||||
}
|
||||
if isinstance(
|
||||
response.content_block.content, WebSearchToolResultError
|
||||
)
|
||||
else {
|
||||
"content": [
|
||||
{
|
||||
"type": "web_search_result",
|
||||
"encrypted_content": block.encrypted_content,
|
||||
"page_age": block.page_age,
|
||||
"title": block.title,
|
||||
"url": block.url,
|
||||
}
|
||||
for block in response.content_block.content
|
||||
]
|
||||
},
|
||||
}
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
content_details.citation_details[-1].length += len(response.delta.text)
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
yield {"thinking_content": response.delta.thinking}
|
||||
@@ -486,8 +271,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
)
|
||||
}
|
||||
has_native = True
|
||||
elif isinstance(response.delta, CitationsDelta):
|
||||
content_details.add_citation(response.delta.citation)
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_tool_block is not None:
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
@@ -498,7 +281,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
id=current_tool_block["id"],
|
||||
tool_name=current_tool_block["name"],
|
||||
tool_args=tool_args,
|
||||
external=current_tool_block["type"] == "server_tool_use",
|
||||
)
|
||||
]
|
||||
}
|
||||
@@ -508,12 +290,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
@@ -561,11 +337,21 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
model_args = MessageCreateParamsStreaming(
|
||||
@@ -575,8 +361,8 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
system=system.content,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
@@ -590,34 +376,6 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
tools: list[ToolUnionParam] = []
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
if options.get(CONF_WEB_SEARCH):
|
||||
web_search = WebSearchTool20250305Param(
|
||||
name="web_search",
|
||||
type="web_search_20250305",
|
||||
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||
)
|
||||
if options.get(CONF_WEB_SEARCH_USER_LOCATION):
|
||||
web_search["user_location"] = {
|
||||
"type": "approximate",
|
||||
"city": options.get(CONF_WEB_SEARCH_CITY, ""),
|
||||
"region": options.get(CONF_WEB_SEARCH_REGION, ""),
|
||||
"country": options.get(CONF_WEB_SEARCH_COUNTRY, ""),
|
||||
"timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""),
|
||||
}
|
||||
tools.append(web_search)
|
||||
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
|
@@ -35,17 +35,11 @@
          "temperature": "Temperature",
          "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
          "recommended": "Recommended model settings",
          "thinking_budget": "Thinking budget",
          "web_search": "Enable web search",
          "web_search_max_uses": "Maximum web searches",
          "user_location": "Include home location"
          "thinking_budget_tokens": "Thinking budget"
        },
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template.",
          "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
          "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
          "web_search_max_uses": "Limit the number of searches performed per response",
          "user_location": "Localize search results based on home location"
          "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
        }
      }
    },
@@ -54,8 +48,7 @@
      "entry_not_loaded": "Cannot add things while the configuration is disabled."
    },
    "error": {
      "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
      "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
      "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
    }
  }
}
@@ -36,14 +36,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo
        raise ConfigEntryAuthFailed("Migration to OAuth required")

    session = async_create_august_clientsession(hass)
    try:
        implementation = (
            await config_entry_oauth2_flow.async_get_config_entry_implementation(
                hass, entry
            )
    implementation = (
        await config_entry_oauth2_flow.async_get_config_entry_implementation(
            hass, entry
        )
    except ValueError as err:
        raise ConfigEntryNotReady("OAuth implementation not available") from err
    )
    oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
    august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
    try:
@@ -6,7 +6,7 @@
  "documentation": "https://www.home-assistant.io/integrations/control4",
  "iot_class": "local_polling",
  "loggers": ["pyControl4"],
  "requirements": ["pyControl4==1.5.0"],
  "requirements": ["pyControl4==1.2.0"],
  "ssdp": [
    {
      "st": "c4:director"
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ecovacs",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
  "requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]
  "requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
}
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from elevenlabs import AsyncElevenLabs, Model
|
||||
from elevenlabs.core import ApiError
|
||||
@@ -19,14 +18,9 @@ from homeassistant.exceptions import (
|
||||
)
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_MODEL, CONF_STT_MODEL
|
||||
from .const import CONF_MODEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.STT,
|
||||
Platform.TTS,
|
||||
]
|
||||
PLATFORMS: list[Platform] = [Platform.TTS]
|
||||
|
||||
|
||||
async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None:
|
||||
@@ -45,7 +39,6 @@ class ElevenLabsData:
|
||||
|
||||
client: AsyncElevenLabs
|
||||
model: Model
|
||||
stt_model: str
|
||||
|
||||
|
||||
type ElevenLabsConfigEntry = ConfigEntry[ElevenLabsData]
|
||||
@@ -69,9 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
|
||||
if model is None or (not model.languages):
|
||||
raise ConfigEntryError("Model could not be resolved")
|
||||
|
||||
entry.runtime_data = ElevenLabsData(
|
||||
client=client, model=model, stt_model=entry.options[CONF_STT_MODEL]
|
||||
)
|
||||
entry.runtime_data = ElevenLabsData(client=client, model=model)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
@@ -87,44 +78,3 @@ async def update_listener(
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: ElevenLabsConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate old config entry to new format."""
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migrating configuration from version %s.%s",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
if config_entry.version > 1:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if config_entry.version == 1:
|
||||
new_options = {**config_entry.options}
|
||||
|
||||
if config_entry.minor_version < 2:
|
||||
# Add defaults only if they’re not already present
|
||||
if "stt_auto_language" not in new_options:
|
||||
new_options["stt_auto_language"] = False
|
||||
if "stt_model" not in new_options:
|
||||
new_options["stt_model"] = "scribe_v1"
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
options=new_options,
|
||||
minor_version=2,
|
||||
version=1,
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to configuration version %s.%s successful",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
return True # already up to date
|
||||
|
@@ -25,20 +25,15 @@ from .const import (
|
||||
CONF_MODEL,
|
||||
CONF_SIMILARITY,
|
||||
CONF_STABILITY,
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
CONF_STT_MODEL,
|
||||
CONF_STYLE,
|
||||
CONF_USE_SPEAKER_BOOST,
|
||||
CONF_VOICE,
|
||||
DEFAULT_MODEL,
|
||||
DEFAULT_SIMILARITY,
|
||||
DEFAULT_STABILITY,
|
||||
DEFAULT_STT_AUTO_LANGUAGE,
|
||||
DEFAULT_STT_MODEL,
|
||||
DEFAULT_STYLE,
|
||||
DEFAULT_TTS_MODEL,
|
||||
DEFAULT_USE_SPEAKER_BOOST,
|
||||
DOMAIN,
|
||||
STT_MODELS,
|
||||
)
|
||||
|
||||
USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str})
|
||||
@@ -73,7 +68,6 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for ElevenLabs text-to-speech."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -94,12 +88,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(
|
||||
title="ElevenLabs",
|
||||
data=user_input,
|
||||
options={
|
||||
CONF_MODEL: DEFAULT_TTS_MODEL,
|
||||
CONF_VOICE: list(voices)[0],
|
||||
CONF_STT_MODEL: DEFAULT_STT_MODEL,
|
||||
CONF_STT_AUTO_LANGUAGE: False,
|
||||
},
|
||||
options={CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: list(voices)[0]},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=USER_STEP_SCHEMA, errors=errors
|
||||
@@ -124,9 +113,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
self.models: dict[str, str] = {}
|
||||
self.model: str | None = None
|
||||
self.voice: str | None = None
|
||||
self.stt_models: dict[str, str] = STT_MODELS
|
||||
self.stt_model: str | None = None
|
||||
self.auto_language: bool | None = None
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -140,8 +126,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
if user_input is not None:
|
||||
self.model = user_input[CONF_MODEL]
|
||||
self.voice = user_input[CONF_VOICE]
|
||||
self.stt_model = user_input[CONF_STT_MODEL]
|
||||
self.auto_language = user_input[CONF_STT_AUTO_LANGUAGE]
|
||||
configure_voice = user_input.pop(CONF_CONFIGURE_VOICE)
|
||||
if configure_voice:
|
||||
return await self.async_step_voice_settings()
|
||||
@@ -181,22 +165,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
]
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_STT_MODEL,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(label=model_name, value=model_id)
|
||||
for model_id, model_name in self.stt_models.items()
|
||||
]
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_STT_AUTO_LANGUAGE, DEFAULT_STT_AUTO_LANGUAGE
|
||||
),
|
||||
): bool,
|
||||
vol.Required(CONF_CONFIGURE_VOICE, default=False): bool,
|
||||
}
|
||||
),
|
||||
@@ -211,8 +179,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
if user_input is not None:
|
||||
user_input[CONF_MODEL] = self.model
|
||||
user_input[CONF_VOICE] = self.voice
|
||||
user_input[CONF_STT_MODEL] = self.stt_model
|
||||
user_input[CONF_STT_AUTO_LANGUAGE] = self.auto_language
|
||||
return self.async_create_entry(
|
||||
title="ElevenLabs",
|
||||
data=user_input,
|
||||
|
@@ -7,123 +7,12 @@ CONF_MODEL = "model"
|
||||
CONF_CONFIGURE_VOICE = "configure_voice"
|
||||
CONF_STABILITY = "stability"
|
||||
CONF_SIMILARITY = "similarity"
|
||||
CONF_STT_AUTO_LANGUAGE = "stt_auto_language"
|
||||
CONF_STT_MODEL = "stt_model"
|
||||
CONF_STYLE = "style"
|
||||
CONF_USE_SPEAKER_BOOST = "use_speaker_boost"
|
||||
DOMAIN = "elevenlabs"
|
||||
|
||||
DEFAULT_TTS_MODEL = "eleven_multilingual_v2"
|
||||
DEFAULT_MODEL = "eleven_multilingual_v2"
|
||||
DEFAULT_STABILITY = 0.5
|
||||
DEFAULT_SIMILARITY = 0.75
|
||||
DEFAULT_STT_AUTO_LANGUAGE = False
|
||||
DEFAULT_STT_MODEL = "scribe_v1"
|
||||
DEFAULT_STYLE = 0
|
||||
DEFAULT_USE_SPEAKER_BOOST = True
|
||||
|
||||
STT_LANGUAGES = [
|
||||
"af-ZA", # Afrikaans
|
||||
"am-ET", # Amharic
|
||||
"ar-SA", # Arabic
|
||||
"hy-AM", # Armenian
|
||||
"as-IN", # Assamese
|
||||
"ast-ES", # Asturian
|
||||
"az-AZ", # Azerbaijani
|
||||
"be-BY", # Belarusian
|
||||
"bn-IN", # Bengali
|
||||
"bs-BA", # Bosnian
|
||||
"bg-BG", # Bulgarian
|
||||
"my-MM", # Burmese
|
||||
"yue-HK", # Cantonese
|
||||
"ca-ES", # Catalan
|
||||
"ceb-PH", # Cebuano
|
||||
"ny-MW", # Chichewa
|
||||
"hr-HR", # Croatian
|
||||
"cs-CZ", # Czech
|
||||
"da-DK", # Danish
|
||||
"nl-NL", # Dutch
|
||||
"en-US", # English
|
||||
"et-EE", # Estonian
|
||||
"fil-PH", # Filipino
|
||||
"fi-FI", # Finnish
|
||||
"fr-FR", # French
|
||||
"ff-SN", # Fulah
|
||||
"gl-ES", # Galician
|
||||
"lg-UG", # Ganda
|
||||
"ka-GE", # Georgian
|
||||
"de-DE", # German
|
||||
"el-GR", # Greek
|
||||
"gu-IN", # Gujarati
|
||||
"ha-NG", # Hausa
|
||||
"he-IL", # Hebrew
|
||||
"hi-IN", # Hindi
|
||||
"hu-HU", # Hungarian
|
||||
"is-IS", # Icelandic
|
||||
"ig-NG", # Igbo
|
||||
"id-ID", # Indonesian
|
||||
"ga-IE", # Irish
|
||||
"it-IT", # Italian
|
||||
"ja-JP", # Japanese
|
||||
"jv-ID", # Javanese
|
||||
"kea-CV", # Kabuverdianu
|
||||
"kn-IN", # Kannada
|
||||
"kk-KZ", # Kazakh
|
||||
"km-KH", # Khmer
|
||||
"ko-KR", # Korean
|
||||
"ku-TR", # Kurdish
|
||||
"ky-KG", # Kyrgyz
|
||||
"lo-LA", # Lao
|
||||
"lv-LV", # Latvian
|
||||
"ln-CD", # Lingala
|
||||
"lt-LT", # Lithuanian
|
||||
"luo-KE", # Luo
|
||||
"lb-LU", # Luxembourgish
|
||||
"mk-MK", # Macedonian
|
||||
"ms-MY", # Malay
|
||||
"ml-IN", # Malayalam
|
||||
"mt-MT", # Maltese
|
||||
"zh-CN", # Mandarin Chinese
|
||||
"mi-NZ", # Māori
|
||||
"mr-IN", # Marathi
|
||||
"mn-MN", # Mongolian
|
||||
"ne-NP", # Nepali
|
||||
"nso-ZA", # Northern Sotho
|
||||
"no-NO", # Norwegian
|
||||
"oc-FR", # Occitan
|
||||
"or-IN", # Odia
|
||||
"ps-AF", # Pashto
|
||||
"fa-IR", # Persian
|
||||
"pl-PL", # Polish
|
||||
"pt-PT", # Portuguese
|
||||
"pa-IN", # Punjabi
|
||||
"ro-RO", # Romanian
|
||||
"ru-RU", # Russian
|
||||
"sr-RS", # Serbian
|
||||
"sn-ZW", # Shona
|
||||
"sd-PK", # Sindhi
|
||||
"sk-SK", # Slovak
|
||||
"sl-SI", # Slovenian
|
||||
"so-SO", # Somali
|
||||
"es-ES", # Spanish
|
||||
"sw-KE", # Swahili
|
||||
"sv-SE", # Swedish
|
||||
"ta-IN", # Tamil
|
||||
"tg-TJ", # Tajik
|
||||
"te-IN", # Telugu
|
||||
"th-TH", # Thai
|
||||
"tr-TR", # Turkish
|
||||
"uk-UA", # Ukrainian
|
||||
"umb-AO", # Umbundu
|
||||
"ur-PK", # Urdu
|
||||
"uz-UZ", # Uzbek
|
||||
"vi-VN", # Vietnamese
|
||||
"cy-GB", # Welsh
|
||||
"wo-SN", # Wolof
|
||||
"xh-ZA", # Xhosa
|
||||
"zu-ZA", # Zulu
|
||||
]
|
||||
|
||||
STT_MODELS = {
|
||||
"scribe_v1": "Scribe v1",
|
||||
"scribe_v1_experimental": "Scribe v1 Experimental",
|
||||
}
|
||||
|
@@ -21,15 +21,11 @@
|
||||
"data": {
|
||||
"voice": "Voice",
|
||||
"model": "Model",
|
||||
"stt_model": "Speech-to-Text Model",
|
||||
"stt_auto_language": "Auto-detect language",
|
||||
"configure_voice": "Configure advanced voice settings"
|
||||
},
|
||||
"data_description": {
|
||||
"voice": "Voice to use for text-to-speech.",
|
||||
"voice": "Voice to use for the TTS.",
|
||||
"model": "ElevenLabs model to use. Please note that not all models support all languages equally well.",
|
||||
"stt_model": "Speech-to-Text model to use.",
|
||||
"stt_auto_language": "Automatically detect the spoken language for speech-to-text.",
|
||||
"configure_voice": "Configure advanced voice settings. Find more information in the ElevenLabs documentation."
|
||||
}
|
||||
},
|
||||
@@ -48,17 +44,5 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"tts": {
|
||||
"elevenlabs_tts": {
|
||||
"name": "Text-to-Speech"
|
||||
}
|
||||
},
|
||||
"stt": {
|
||||
"elevenlabs_stt": {
|
||||
"name": "Speech-to-Text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,207 +0,0 @@
|
||||
"""Support for the ElevenLabs speech-to-text service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterable
|
||||
from io import BytesIO
|
||||
import logging
|
||||
|
||||
from elevenlabs import AsyncElevenLabs
|
||||
from elevenlabs.core import ApiError
|
||||
from elevenlabs.types import Model
|
||||
|
||||
from homeassistant.components import stt
|
||||
from homeassistant.components.stt import (
|
||||
AudioBitRates,
|
||||
AudioChannels,
|
||||
AudioCodecs,
|
||||
AudioFormats,
|
||||
AudioSampleRates,
|
||||
SpeechMetadata,
|
||||
SpeechResultState,
|
||||
SpeechToTextEntity,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ElevenLabsConfigEntry
|
||||
from .const import (
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
DEFAULT_STT_AUTO_LANGUAGE,
|
||||
DOMAIN,
|
||||
STT_LANGUAGES,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PARALLEL_UPDATES = 10
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ElevenLabsConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up ElevenLabs stt platform via config entry."""
|
||||
client = config_entry.runtime_data.client
|
||||
auto_detect = config_entry.options.get(
|
||||
CONF_STT_AUTO_LANGUAGE, DEFAULT_STT_AUTO_LANGUAGE
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
ElevenLabsSTTEntity(
|
||||
client,
|
||||
config_entry.runtime_data.model,
|
||||
config_entry.runtime_data.stt_model,
|
||||
config_entry.entry_id,
|
||||
auto_detect_language=auto_detect,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class ElevenLabsSTTEntity(SpeechToTextEntity):
|
||||
"""The ElevenLabs STT API entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_translation_key = "elevenlabs_stt"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
client: AsyncElevenLabs,
|
||||
model: Model,
|
||||
stt_model: str,
|
||||
entry_id: str,
|
||||
auto_detect_language: bool = False,
|
||||
) -> None:
|
||||
"""Init ElevenLabs TTS service."""
|
||||
self._client = client
|
||||
self._auto_detect_language = auto_detect_language
|
||||
self._stt_model = stt_model
|
||||
|
||||
# Entity attributes
|
||||
self._attr_unique_id = entry_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, entry_id)},
|
||||
manufacturer="ElevenLabs",
|
||||
model=model.name,
|
||||
name="ElevenLabs",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
@property
|
||||
def supported_languages(self) -> list[str]:
|
||||
"""Return a list of supported languages."""
|
||||
return STT_LANGUAGES
|
||||
|
||||
@property
|
||||
def supported_formats(self) -> list[AudioFormats]:
|
||||
"""Return a list of supported formats."""
|
||||
return [AudioFormats.WAV, AudioFormats.OGG]
|
||||
|
||||
@property
|
||||
def supported_codecs(self) -> list[AudioCodecs]:
|
||||
"""Return a list of supported codecs."""
|
||||
return [AudioCodecs.PCM, AudioCodecs.OPUS]
|
||||
|
||||
@property
|
||||
def supported_bit_rates(self) -> list[AudioBitRates]:
|
||||
"""Return a list of supported bit rates."""
|
||||
return [AudioBitRates.BITRATE_16]
|
||||
|
||||
@property
|
||||
def supported_sample_rates(self) -> list[AudioSampleRates]:
|
||||
"""Return a list of supported sample rates."""
|
||||
return [AudioSampleRates.SAMPLERATE_16000]
|
||||
|
||||
@property
|
||||
def supported_channels(self) -> list[AudioChannels]:
|
||||
"""Return a list of supported channels."""
|
||||
return [
|
||||
AudioChannels.CHANNEL_MONO,
|
||||
AudioChannels.CHANNEL_STEREO,
|
||||
]
|
||||
|
||||
async def async_process_audio_stream(
|
||||
self, metadata: SpeechMetadata, stream: AsyncIterable[bytes]
|
||||
) -> stt.SpeechResult:
|
||||
"""Process an audio stream to STT service."""
|
||||
_LOGGER.debug(
|
||||
"Processing audio stream for STT: model=%s, language=%s, format=%s, codec=%s, sample_rate=%s, channels=%s, bit_rate=%s",
|
||||
self._stt_model,
|
||||
metadata.language,
|
||||
metadata.format,
|
||||
metadata.codec,
|
||||
metadata.sample_rate,
|
||||
metadata.channel,
|
||||
metadata.bit_rate,
|
||||
)
|
||||
|
||||
if self._auto_detect_language:
|
||||
lang_code = None
|
||||
else:
|
||||
language = metadata.language
|
||||
if language.lower() not in [lang.lower() for lang in STT_LANGUAGES]:
|
||||
_LOGGER.warning("Unsupported language: %s", language)
|
||||
return stt.SpeechResult(None, SpeechResultState.ERROR)
|
||||
lang_code = language.split("-")[0]
|
||||
|
||||
raw_pcm_compatible = (
|
||||
metadata.codec == AudioCodecs.PCM
|
||||
and metadata.sample_rate == AudioSampleRates.SAMPLERATE_16000
|
||||
and metadata.channel == AudioChannels.CHANNEL_MONO
|
||||
and metadata.bit_rate == AudioBitRates.BITRATE_16
|
||||
)
|
||||
if raw_pcm_compatible:
|
||||
file_format = "pcm_s16le_16"
|
||||
elif metadata.codec == AudioCodecs.PCM:
|
||||
_LOGGER.warning("PCM input does not meet expected raw format requirements")
|
||||
return stt.SpeechResult(None, SpeechResultState.ERROR)
|
||||
else:
|
||||
file_format = "other"
|
||||
|
||||
audio = b""
|
||||
async for chunk in stream:
|
||||
audio += chunk
|
||||
|
||||
_LOGGER.debug("Finished reading audio stream, total size: %d bytes", len(audio))
|
||||
if not audio:
|
||||
_LOGGER.warning("No audio received in stream")
|
||||
return stt.SpeechResult(None, SpeechResultState.ERROR)
|
||||
|
||||
lang_display = lang_code if lang_code else "auto-detected"
|
||||
|
||||
_LOGGER.debug(
|
||||
"Transcribing audio (%s), format: %s, size: %d bytes",
|
||||
lang_display,
|
||||
file_format,
|
||||
len(audio),
|
||||
)
|
||||
|
||||
try:
|
||||
response = await self._client.speech_to_text.convert(
|
||||
file=BytesIO(audio),
|
||||
file_format=file_format,
|
||||
model_id=self._stt_model,
|
||||
language_code=lang_code,
|
||||
tag_audio_events=False,
|
||||
num_speakers=1,
|
||||
diarize=False,
|
||||
)
|
||||
except ApiError as exc:
|
||||
_LOGGER.error("Error during processing of STT request: %s", exc)
|
||||
return stt.SpeechResult(None, SpeechResultState.ERROR)
|
||||
|
||||
text = response.text or ""
|
||||
detected_lang_code = response.language_code or "?"
|
||||
detected_lang_prob = response.language_probability or "?"
|
||||
|
||||
_LOGGER.debug(
|
||||
"Transcribed text is in language %s (probability %s): %s",
|
||||
detected_lang_code,
|
||||
detected_lang_prob,
|
||||
text,
|
||||
)
|
||||
|
||||
return stt.SpeechResult(text, SpeechResultState.SUCCESS)
|
@@ -71,6 +71,7 @@ async def async_setup_entry(
                voices,
                default_voice_id,
                config_entry.entry_id,
                config_entry.title,
                voice_settings,
            )
        ]
@@ -82,8 +83,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):

    _attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
    _attr_entity_category = EntityCategory.CONFIG
    _attr_has_entity_name = True
    _attr_translation_key = "elevenlabs_tts"

    def __init__(
        self,
@@ -92,6 +91,7 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
        voices: list[ElevenLabsVoice],
        default_voice_id: str,
        entry_id: str,
        title: str,
        voice_settings: VoiceSettings,
    ) -> None:
        """Init ElevenLabs TTS service."""
@@ -112,11 +112,11 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):

        # Entity attributes
        self._attr_unique_id = entry_id
        self._attr_name = title
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry_id)},
            manufacturer="ElevenLabs",
            model=model.name,
            name="ElevenLabs",
            entry_type=DeviceEntryType.SERVICE,
        )
        self._attr_supported_languages = [
@@ -47,8 +47,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ECConfigEntry) ->
    radar_coordinator = ECDataUpdateCoordinator(
        hass, config_entry, radar_data, "radar", DEFAULT_RADAR_UPDATE_INTERVAL
    )
    # Skip initial refresh for radar since the camera entity is disabled by default.
    # The coordinator will fetch data when the entity is enabled.
    try:
        await radar_coordinator.async_config_entry_first_refresh()
    except ConfigEntryNotReady:
        errors = errors + 1
        _LOGGER.warning("Unable to retrieve Environment Canada radar")

    aqhi_data = ECAirQuality(coordinates=(lat, lon))
    aqhi_coordinator = ECDataUpdateCoordinator(
@@ -60,9 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ECConfigEntry) ->
        errors = errors + 1
        _LOGGER.warning("Unable to retrieve Environment Canada AQHI")

    # Require at least one coordinator to succeed (weather or AQHI)
    # Radar is optional since the camera entity is disabled by default
    if errors >= 2:
    if errors == 3:
        raise ConfigEntryNotReady

    config_entry.runtime_data = ECRuntimeData(
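The Environment Canada hunk above keeps counting first-refresh failures per coordinator but changes the abort threshold from "all three failed" to "two or more failed", so a radar failure alone no longer blocks setup. A generic, hypothetical sketch of the underlying count-the-failures pattern; the helper name and parameters are illustrative, not the integration's own code.

```python
# Illustrative helper: refresh several coordinators and abort setup only when
# too many of them fail their first refresh.
from homeassistant.exceptions import ConfigEntryNotReady


async def first_refresh(coordinators, max_failures: int) -> None:
    """Run first refreshes, raising ConfigEntryNotReady past max_failures."""
    errors = 0
    for coordinator in coordinators:
        try:
            await coordinator.async_config_entry_first_refresh()
        except ConfigEntryNotReady:
            errors += 1
    if errors > max_failures:
        raise ConfigEntryNotReady
```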
@@ -59,14 +59,6 @@ class ECCameraEntity(CoordinatorEntity[ECDataUpdateCoordinator[ECRadar]], Camera

        self.content_type = "image/gif"

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        # Trigger coordinator refresh when entity is enabled
        # since radar coordinator skips initial refresh during setup
        if not self.coordinator.last_update_success:
            await self.coordinator.async_request_refresh()

    def camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
@@ -6,18 +6,11 @@ import xml.etree.ElementTree as ET
|
||||
|
||||
import aiohttp
|
||||
from env_canada import ECWeather, ec_exc
|
||||
from env_canada.ec_weather import get_ec_sites_list
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_STATION, CONF_TITLE, DOMAIN
|
||||
|
||||
@@ -32,16 +25,14 @@ async def validate_input(data):
|
||||
lang = data.get(CONF_LANGUAGE).lower()
|
||||
|
||||
if station:
|
||||
# When station is provided, use it and get the coordinates from ECWeather
|
||||
weather_data = ECWeather(station_id=station, language=lang)
|
||||
await weather_data.update()
|
||||
# Always use the station's coordinates, not the user-provided ones
|
||||
else:
|
||||
weather_data = ECWeather(coordinates=(lat, lon), language=lang)
|
||||
await weather_data.update()
|
||||
|
||||
if lat is None or lon is None:
|
||||
lat = weather_data.lat
|
||||
lon = weather_data.lon
|
||||
else:
|
||||
# When no station is provided, use coordinates to find nearest station
|
||||
weather_data = ECWeather(coordinates=(lat, lon), language=lang)
|
||||
await weather_data.update()
|
||||
|
||||
return {
|
||||
CONF_TITLE: weather_data.metadata.location,
|
||||
@@ -55,13 +46,6 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Environment Canada weather."""
|
||||
|
||||
VERSION = 1
|
||||
_station_codes: list[dict[str, str]] | None = None
|
||||
|
||||
async def _get_station_codes(self) -> list[dict[str, str]]:
|
||||
"""Get station codes, cached after first call."""
|
||||
if self._station_codes is None:
|
||||
self._station_codes = await get_ec_sites_list()
|
||||
return self._station_codes
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -96,21 +80,9 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=info[CONF_TITLE], data=user_input)
|
||||
|
||||
station_codes = await self._get_station_codes()
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_STATION): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(
|
||||
value=station["value"], label=station["label"]
|
||||
)
|
||||
for station in station_codes
|
||||
],
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_STATION): str,
|
||||
vol.Optional(
|
||||
CONF_LATITUDE, default=self.hass.config.latitude
|
||||
): cv.latitude,
|
||||
|
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/environment_canada",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
  "requirements": ["env-canada==0.12.1"]
  "requirements": ["env-canada==0.11.3"]
}
@@ -3,11 +3,11 @@
    "step": {
      "user": {
        "title": "Environment Canada: weather location and language",
        "description": "Select a weather station from the dropdown, or specify coordinates to use the closest station. The default coordinates are from your Home Assistant installation. Weather information can be retrieved in English or French.",
        "description": "Either a station ID or latitude/longitude must be specified. The default latitude/longitude used are the values configured in your Home Assistant installation. The closest weather station to the coordinates will be used if specifying coordinates. If a station code is used it must follow the format: PP/code, where PP is the two-letter province and code is the station ID. The list of station IDs can be found here: https://dd.weather.gc.ca/citypage_weather/docs/site_list_towns_en.csv. Weather information can be retrieved in either English or French.",
        "data": {
          "latitude": "[%key:common::config_flow::data::latitude%]",
          "longitude": "[%key:common::config_flow::data::longitude%]",
          "station": "Weather station",
          "station": "Weather station ID",
          "language": "Weather information language"
        }
      }
@@ -16,7 +16,7 @@
    "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
  },
  "error": {
    "bad_station_id": "Station code is invalid, missing, or not found in the station code database",
    "bad_station_id": "Station ID is invalid, missing, or not found in the station ID database",
    "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
    "error_response": "Response from Environment Canada in error",
    "too_many_attempts": "Connections to Environment Canada are rate limited; Try again in 60 seconds",
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.14.0",
"aioesphomeapi==41.13.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

@@ -13,14 +13,27 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_TEMPERATURE,
PRECISION_HALVES,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN, LOGGER
from .const import (
ATTR_STATE_BATTERY_LOW,
ATTR_STATE_HOLIDAY_MODE,
ATTR_STATE_SUMMER_MODE,
ATTR_STATE_WINDOW_OPEN,
DOMAIN,
LOGGER,
)
from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
from .entity import FritzBoxDeviceEntity
from .model import ClimateExtraAttributes
from .sensor import value_scheduled_preset

HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF]
@@ -189,6 +202,26 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
self.check_active_or_lock_mode()
await self.async_set_hkr_state(PRESET_API_HKR_STATE_MAPPING[preset_mode])

@property
def extra_state_attributes(self) -> ClimateExtraAttributes:
"""Return the device specific state attributes."""
# deprecated with #143394, can be removed in 2025.11
attrs: ClimateExtraAttributes = {
ATTR_STATE_BATTERY_LOW: self.data.battery_low,
}

# the following attributes are available since fritzos 7
if self.data.battery_level is not None:
attrs[ATTR_BATTERY_LEVEL] = self.data.battery_level
if self.data.holiday_active is not None:
attrs[ATTR_STATE_HOLIDAY_MODE] = self.data.holiday_active
if self.data.summer_active is not None:
attrs[ATTR_STATE_SUMMER_MODE] = self.data.summer_active
if self.data.window_open is not None:
attrs[ATTR_STATE_WINDOW_OPEN] = self.data.window_open

return attrs

def check_active_or_lock_mode(self) -> None:
"""Check if in summer/vacation mode or lock enabled."""
if self.data.holiday_active or self.data.summer_active:

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251001.4"]
"requirements": ["home-assistant-frontend==20251001.0"]
}
@@ -8,12 +8,7 @@ from typing import Any

import voluptuous as vol

from homeassistant.config_entries import (
SOURCE_REAUTH,
SOURCE_RECONFIGURE,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult, OptionsFlow
from homeassistant.core import callback
from homeassistant.helpers import config_entry_oauth2_flow

@@ -45,12 +40,6 @@ class OAuth2FlowHandler(
"prompt": "consent",
}

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a reconfiguration flow."""
return await self.async_step_user(user_input)

async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
@@ -71,10 +60,6 @@ class OAuth2FlowHandler(
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data=data
)
if self.source == SOURCE_RECONFIGURE:
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(), data=data
)

return self.async_create_entry(
title=DEFAULT_NAME,

@@ -30,8 +30,7 @@
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -13,6 +13,7 @@ import struct
from typing import Any, NamedTuple

from aiohasupervisor import SupervisorError
from aiohasupervisor.models import GreenOptions, YellowOptions  # noqa: F401
import voluptuous as vol

from homeassistant.auth.const import GROUP_ID_ADMIN
@@ -123,11 +124,6 @@ from .discovery import async_setup_discovery_view
from .handler import (  # noqa: F401
HassIO,
HassioAPIError,
async_create_backup,
async_get_green_settings,
async_get_yellow_settings,
async_set_green_settings,
async_set_yellow_settings,
async_update_diagnostics,
get_supervisor_client,
)

@@ -15,13 +15,14 @@ from aiohasupervisor.models import (
AddonsOptions,
AddonState as SupervisorAddonState,
InstalledAddonComplete,
PartialBackupOptions,
StoreAddonUpdate,
)

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

from .handler import HassioAPIError, async_create_backup, get_supervisor_client
from .handler import HassioAPIError, get_supervisor_client

type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Awaitable[_R]]
type _ReturnFuncType[_T, **_P, _R] = Callable[
@@ -261,17 +262,18 @@ class AddonManager:
"""Stop the managed add-on."""
await self._supervisor_client.addons.stop_addon(self.addon_slug)

@api_error("Failed to create a backup of the {addon_name} add-on")
@api_error(
"Failed to create a backup of the {addon_name} add-on",
expected_error_type=SupervisorError,
)
async def async_create_backup(self) -> None:
"""Create a partial backup of the managed add-on."""
addon_info = await self.async_get_addon_info()
name = f"addon_{self.addon_slug}_{addon_info.version}"

self._logger.debug("Creating backup: %s", name)
await async_create_backup(
self._hass,
{"name": name, "addons": [self.addon_slug]},
partial=True,
await self._supervisor_client.backups.partial_backup(
PartialBackupOptions(name=name, addons={self.addon_slug})
)

async def async_configure_addon(
@@ -10,6 +10,7 @@ import os
from typing import Any

from aiohasupervisor import SupervisorClient
from aiohasupervisor.models import SupervisorOptions
import aiohttp
from yarl import URL

@@ -22,7 +23,6 @@ from homeassistant.components.http import (
from homeassistant.const import SERVER_PORT
from homeassistant.core import HomeAssistant
from homeassistant.helpers.singleton import singleton
from homeassistant.loader import bind_hass

from .const import ATTR_MESSAGE, ATTR_RESULT, DATA_COMPONENT, X_HASS_SOURCE

@@ -66,73 +66,6 @@ def api_data[**_P](
return _wrapper

@bind_hass
async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bool:
"""Update Supervisor diagnostics toggle.

The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DATA_COMPONENT]
return await hassio.update_diagnostics(diagnostics)

@bind_hass
@api_data
async def async_create_backup(
hass: HomeAssistant, payload: dict, partial: bool = False
) -> dict:
"""Create a full or partial backup.

The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DATA_COMPONENT]
backup_type = "partial" if partial else "full"
command = f"/backups/new/{backup_type}"
return await hassio.send_command(command, payload=payload, timeout=None)

@api_data
async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]:
"""Return settings specific to Home Assistant Green."""
hassio = hass.data[DATA_COMPONENT]
return await hassio.send_command("/os/boards/green", method="get")

@api_data
async def async_set_green_settings(
hass: HomeAssistant, settings: dict[str, bool]
) -> dict:
"""Set settings specific to Home Assistant Green.

Returns an empty dict.
"""
hassio = hass.data[DATA_COMPONENT]
return await hassio.send_command(
"/os/boards/green", method="post", payload=settings
)

@api_data
async def async_get_yellow_settings(hass: HomeAssistant) -> dict[str, bool]:
"""Return settings specific to Home Assistant Yellow."""
hassio = hass.data[DATA_COMPONENT]
return await hassio.send_command("/os/boards/yellow", method="get")

@api_data
async def async_set_yellow_settings(
hass: HomeAssistant, settings: dict[str, bool]
) -> dict:
"""Set settings specific to Home Assistant Yellow.

Returns an empty dict.
"""
hassio = hass.data[DATA_COMPONENT]
return await hassio.send_command(
"/os/boards/yellow", method="post", payload=settings
)

class HassIO:
"""Small API wrapper for Hass.io."""

@@ -257,16 +190,6 @@ class HassIO:
"/supervisor/options", payload={"timezone": timezone, "country": country}
)

@_api_bool
def update_diagnostics(self, diagnostics: bool) -> Coroutine:
"""Update Supervisor diagnostics setting.

This method returns a coroutine.
"""
return self.send_command(
"/supervisor/options", payload={"diagnostics": diagnostics}
)

async def send_command(
self,
command: str,
@@ -341,3 +264,13 @@ def get_supervisor_client(hass: HomeAssistant) -> SupervisorClient:
os.environ.get("SUPERVISOR_TOKEN", ""),
session=hassio.websession,
)

async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> None:
"""Update Supervisor diagnostics toggle.

The caller of the function should handle SupervisorError.
"""
await get_supervisor_client(hass).supervisor.set_options(
SupervisorOptions(diagnostics=diagnostics)
)
@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["pyheos"],
"quality_scale": "platinum",
"requirements": ["pyheos==1.0.6"],
"requirements": ["pyheos==1.0.5"],
"ssdp": [
{
"st": "urn:schemas-denon-com:device:ACT-Denon:1"
@@ -99,20 +99,6 @@ CLEANING_MODE_OPTIONS = {
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Silent",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Standard",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Power",
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.IntelligentMode",
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.VacuumOnly",
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.MopOnly",
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.VacuumAndMop",
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.MopAfterVacuum",
)
}

SUCTION_POWER_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Silent",
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Standard",
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Max",
)
}

@@ -323,10 +309,6 @@ PROGRAM_ENUM_OPTIONS = {
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_CLEANING_MODE,
CLEANING_MODE_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_SUCTION_POWER,
SUCTION_POWER_OPTIONS,
),
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT, BEAN_AMOUNT_OPTIONS),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_TEMPERATURE,

@@ -30,7 +30,6 @@ from .const import (
INTENSIVE_LEVEL_OPTIONS,
PROGRAMS_TRANSLATION_KEYS_MAP,
SPIN_SPEED_OPTIONS,
SUCTION_POWER_OPTIONS,
TEMPERATURE_OPTIONS,
TRANSLATION_KEYS_PROGRAMS_MAP,
VARIO_PERFECT_OPTIONS,
@@ -169,16 +168,6 @@ PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS = (
for translation_key, value in CLEANING_MODE_OPTIONS.items()
},
),
HomeConnectSelectEntityDescription(
key=OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_SUCTION_POWER,
translation_key="suction_power",
options=list(SUCTION_POWER_OPTIONS),
translation_key_values=SUCTION_POWER_OPTIONS,
values_translation_key={
value: translation_key
for translation_key, value in SUCTION_POWER_OPTIONS.items()
},
),
HomeConnectSelectEntityDescription(
key=OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT,
translation_key="bean_amount",

@@ -202,22 +202,6 @@ set_program_and_options:
- consumer_products_cleaning_robot_enum_type_cleaning_modes_silent
- consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
- consumer_products_cleaning_robot_enum_type_cleaning_modes_power
- consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode
- consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only
- consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only
- consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop
- consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum
consumer_products_cleaning_robot_option_suction_power:
example: consumer_products_cleaning_robot_enum_type_suction_power_standard
required: false
selector:
select:
mode: dropdown
translation_key: suction_power
options:
- consumer_products_cleaning_robot_enum_type_suction_power_silent
- consumer_products_cleaning_robot_enum_type_suction_power_standard
- consumer_products_cleaning_robot_enum_type_suction_power_max
coffee_maker_options:
collapsed: true
fields:

@@ -324,19 +324,7 @@
"options": {
"consumer_products_cleaning_robot_enum_type_cleaning_modes_silent": "Silent",
"consumer_products_cleaning_robot_enum_type_cleaning_modes_standard": "Standard",
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "Power",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode": "Intelligent mode",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only": "Vacuum only",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only": "Mop only",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop": "Vacuum and mop",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum": "Mop after vacuum"
}
},
"suction_power": {
"options": {
"consumer_products_cleaning_robot_enum_type_suction_power_silent": "Silent",
"consumer_products_cleaning_robot_enum_type_suction_power_standard": "Standard",
"consumer_products_cleaning_robot_enum_type_suction_power_max": "Max"
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "Power"
}
},
"bean_amount": {
@@ -531,10 +519,6 @@
"name": "Cleaning mode",
"description": "Defines the favored cleaning mode."
},
"consumer_products_cleaning_robot_option_suction_power": {
"name": "Suction power",
"description": "Defines the suction power."
},
"consumer_products_coffee_maker_option_bean_amount": {
"name": "Bean amount",
"description": "Describes the amount of coffee beans used in a coffee machine program."
@@ -1212,20 +1196,7 @@
"state": {
"consumer_products_cleaning_robot_enum_type_cleaning_modes_silent": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_silent%]",
"consumer_products_cleaning_robot_enum_type_cleaning_modes_standard": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_standard%]",
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_power%]",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode%]",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only%]",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only%]",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop%]",
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum%]"
}
},
"suction_power": {
"name": "[%key:component::home_connect::services::set_program_and_options::fields::consumer_products_cleaning_robot_option_suction_power::name%]",
"state": {
"consumer_products_cleaning_robot_enum_type_suction_power_silent": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_silent%]",
"consumer_products_cleaning_robot_enum_type_suction_power_standard": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_standard%]",
"consumer_products_cleaning_robot_enum_type_suction_power_max": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_max%]"
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_power%]"
}
},
"bean_amount": {
@@ -6,13 +6,12 @@ import asyncio
import logging
from typing import Any

import aiohttp
import voluptuous as vol

from homeassistant.components.hassio import (
HassioAPIError,
async_get_green_settings,
async_set_green_settings,
GreenOptions,
SupervisorError,
get_supervisor_client,
)
from homeassistant.config_entries import (
ConfigEntry,
@@ -20,7 +19,7 @@ from homeassistant.config_entries import (
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.core import HomeAssistant, async_get_hass, callback
from homeassistant.helpers import selector
from homeassistant.helpers.hassio import is_hassio

@@ -49,7 +48,7 @@ class HomeAssistantGreenConfigFlow(ConfigFlow, domain=DOMAIN):
config_entry: ConfigEntry,
) -> HomeAssistantGreenOptionsFlow:
"""Return the options flow."""
return HomeAssistantGreenOptionsFlow()
return HomeAssistantGreenOptionsFlow(async_get_hass())

async def async_step_system(
self, data: dict[str, Any] | None = None
@@ -63,6 +62,11 @@ class HomeAssistantGreenOptionsFlow(OptionsFlow):

_hw_settings: dict[str, bool] | None = None

def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None:
"""Instantiate options flow."""
super().__init__(*args, **kwargs)
self._supervisor_client = get_supervisor_client(hass)

async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -76,27 +80,27 @@ class HomeAssistantGreenOptionsFlow(OptionsFlow):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle hardware settings."""

if user_input is not None:
if self._hw_settings == user_input:
return self.async_create_entry(data={})
try:
async with asyncio.timeout(10):
await async_set_green_settings(self.hass, user_input)
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
await self._supervisor_client.os.set_green_options(
GreenOptions.from_dict(user_input)
)
except (TimeoutError, SupervisorError) as err:
_LOGGER.warning("Failed to write hardware settings", exc_info=err)
return self.async_abort(reason="write_hw_settings_error")
return self.async_create_entry(data={})

try:
async with asyncio.timeout(10):
self._hw_settings: dict[str, bool] = await async_get_green_settings(
self.hass
)
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
green_info = await self._supervisor_client.os.green_info()
except (TimeoutError, SupervisorError) as err:
_LOGGER.warning("Failed to read hardware settings", exc_info=err)
return self.async_abort(reason="read_hw_settings_error")

self._hw_settings: dict[str, bool] = green_info.to_dict()
schema = self.add_suggested_values_to_schema(
STEP_HW_SETTINGS_SCHEMA, self._hw_settings
)

@@ -67,7 +67,7 @@
}
},
"abort": {
"not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please manually set up OpenThread Border Router to communicate with it.",
"not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.",
"otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again.",
"zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.",
"otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.",
@@ -7,13 +7,11 @@ import asyncio
import logging
from typing import TYPE_CHECKING, Any, Protocol, final

import aiohttp
import voluptuous as vol

from homeassistant.components.hassio import (
HassioAPIError,
async_get_yellow_settings,
async_set_yellow_settings,
SupervisorError,
YellowOptions,
get_supervisor_client,
)
from homeassistant.components.homeassistant_hardware.firmware_config_flow import (
@@ -222,21 +220,22 @@ class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC):
return self.async_create_entry(data={})
try:
async with asyncio.timeout(10):
await async_set_yellow_settings(self.hass, user_input)
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
await self._supervisor_client.os.set_yellow_options(
YellowOptions.from_dict(user_input)
)
except (TimeoutError, SupervisorError) as err:
_LOGGER.warning("Failed to write hardware settings", exc_info=err)
return self.async_abort(reason="write_hw_settings_error")
return await self.async_step_reboot_menu()

try:
async with asyncio.timeout(10):
self._hw_settings: dict[str, bool] = await async_get_yellow_settings(
self.hass
)
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
yellow_info = await self._supervisor_client.os.yellow_info()
except (TimeoutError, SupervisorError) as err:
_LOGGER.warning("Failed to read hardware settings", exc_info=err)
return self.async_abort(reason="read_hw_settings_error")

self._hw_settings: dict[str, bool] = yellow_info.to_dict()
schema = self.add_suggested_values_to_schema(
STEP_HW_SETTINGS_SCHEMA, self._hw_settings
)
@@ -158,7 +158,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_kwh or None,
value_fn=lambda data: data.measurement.energy_import_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t1_kwh",
@@ -172,7 +172,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
data.measurement.energy_import_t1_kwh is not None
and data.measurement.energy_export_t2_kwh is not None
),
value_fn=lambda data: data.measurement.energy_import_t1_kwh or None,
value_fn=lambda data: data.measurement.energy_import_t1_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t2_kwh",
@@ -182,7 +182,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t2_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t2_kwh or None,
value_fn=lambda data: data.measurement.energy_import_t2_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t3_kwh",
@@ -192,7 +192,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t3_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t3_kwh or None,
value_fn=lambda data: data.measurement.energy_import_t3_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t4_kwh",
@@ -202,7 +202,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t4_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t4_kwh or None,
value_fn=lambda data: data.measurement.energy_import_t4_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_export_kwh",
@@ -212,7 +212,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_kwh or None,
value_fn=lambda data: data.measurement.energy_export_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t1_kwh",
@@ -227,7 +227,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
and data.measurement.energy_export_t2_kwh is not None
),
enabled_fn=lambda data: data.measurement.energy_export_t1_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t1_kwh or None,
value_fn=lambda data: data.measurement.energy_export_t1_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t2_kwh",
@@ -238,7 +238,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t2_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t2_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t2_kwh or None,
value_fn=lambda data: data.measurement.energy_export_t2_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t3_kwh",
@@ -249,7 +249,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t3_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t3_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t3_kwh or None,
value_fn=lambda data: data.measurement.energy_export_t3_kwh,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t4_kwh",
@@ -260,7 +260,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t4_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t4_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t4_kwh or None,
value_fn=lambda data: data.measurement.energy_export_t4_kwh,
),
HomeWizardSensorEntityDescription(
key="active_power_w",
@@ -89,12 +89,12 @@ class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):
"""Initialize AutomowerEntity."""
super().__init__(coordinator)
self.mower_id = mower_id
parts = self.mower_attributes.system.model.split(maxsplit=2)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, mower_id)},
manufacturer=parts[0],
model=parts[1],
model_id=parts[2],
manufacturer="Husqvarna",
model=self.mower_attributes.system.model.removeprefix(
"HUSQVARNA "
).removeprefix("Husqvarna "),
name=self.mower_attributes.system.name,
serial_number=self.mower_attributes.system.serial_number,
suggested_area="Garden",

@@ -1,12 +1,12 @@
{
"domain": "iometer",
"name": "IOmeter",
"codeowners": ["@jukrebs"],
"codeowners": ["@MaestroOnICe"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/iometer",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["iometer==0.2.0"],
"requirements": ["iometer==0.1.0"],
"zeroconf": ["_iometer._tcp.local."]
}

@@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/kegtron",
"iot_class": "local_push",
"requirements": ["kegtron-ble==1.0.2"]
"requirements": ["kegtron-ble==0.4.0"]
}

@@ -11,9 +11,9 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "silver",
"requirements": [
"xknx==3.9.1",
"xknx==3.9.0",
"xknxproject==3.8.2",
"knx-frontend==2025.10.9.185845"
"knx-frontend==2025.8.24.205840"
],
"single_config_entry": true
}

@@ -136,7 +136,7 @@ def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor:
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
always_callback=True,
always_callback=config[SensorSchema.CONF_ALWAYS_CALLBACK],
value_type=config[CONF_TYPE],
)

@@ -159,7 +159,7 @@ class KNXSensor(KnxYamlEntity, SensorEntity):
SensorDeviceClass, self._device.ha_device_class()
)

self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_force_update = self._device.always_callback
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()

@@ -134,8 +134,4 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
data=user_input,
)

return self.show_user_form(
user_input,
errors,
description_placeholders={"example_url": "https://mastodon.social"},
)
return self.show_user_form(user_input, errors)

@@ -9,7 +9,7 @@
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"data_description": {
"base_url": "The URL of your Mastodon instance e.g. {example_url}.",
"base_url": "The URL of your Mastodon instance e.g. https://mastodon.social.",
"client_id": "The client key for the application created within your Mastodon account.",
"client_secret": "The client secret for the application created within your Mastodon account.",
"access_token": "The access token for the application created within your Mastodon account."
@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum
from typing import Any

@@ -27,7 +26,7 @@ from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -183,11 +182,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.CLIMATE, async_add_entities)

@dataclass(frozen=True)
class MatterClimateEntityDescription(ClimateEntityDescription, MatterEntityDescription):
"""Describe Matter Climate entities."""

class MatterClimate(MatterEntity, ClimateEntity):
"""Representation of a Matter climate entity."""

@@ -429,7 +423,7 @@ class MatterClimate(MatterEntity, ClimateEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.CLIMATE,
entity_description=MatterClimateEntityDescription(
entity_description=ClimateEntityDescription(
key="MatterThermostat",
name=None,
),

@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum
from math import floor
from typing import Any
@@ -23,7 +22,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import LOGGER
from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -62,15 +61,10 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.COVER, async_add_entities)

@dataclass(frozen=True)
class MatterCoverEntityDescription(CoverEntityDescription, MatterEntityDescription):
"""Describe Matter Cover entities."""

class MatterCover(MatterEntity, CoverEntity):
"""Representation of a Matter Cover."""

entity_description: MatterCoverEntityDescription
entity_description: CoverEntityDescription

@property
def is_closed(self) -> bool | None:
@@ -204,7 +198,7 @@ class MatterCover(MatterEntity, CoverEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.COVER,
entity_description=MatterCoverEntityDescription(
entity_description=CoverEntityDescription(
key="MatterCover",
name=None,
),
@@ -220,7 +214,7 @@ DISCOVERY_SCHEMAS = [
),
MatterDiscoverySchema(
platform=Platform.COVER,
entity_description=MatterCoverEntityDescription(
entity_description=CoverEntityDescription(
key="MatterCoverPositionAwareLift", name=None
),
entity_class=MatterCover,
@@ -235,7 +229,7 @@ DISCOVERY_SCHEMAS = [
),
MatterDiscoverySchema(
platform=Platform.COVER,
entity_description=MatterCoverEntityDescription(
entity_description=CoverEntityDescription(
key="MatterCoverPositionAwareTilt", name=None
),
entity_class=MatterCover,
@@ -250,7 +244,7 @@ DISCOVERY_SCHEMAS = [
),
MatterDiscoverySchema(
platform=Platform.COVER,
entity_description=MatterCoverEntityDescription(
entity_description=CoverEntityDescription(
key="MatterCoverPositionAwareLiftAndTilt", name=None
),
entity_class=MatterCover,
@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

from chip.clusters import Objects as clusters
@@ -19,7 +18,7 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -47,11 +46,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.EVENT, async_add_entities)

@dataclass(frozen=True)
class MatterEventEntityDescription(EventEntityDescription, MatterEntityDescription):
"""Describe Matter Event entities."""

class MatterEventEntity(MatterEntity, EventEntity):
"""Representation of a Matter Event entity."""

@@ -138,7 +132,7 @@ class MatterEventEntity(MatterEntity, EventEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.EVENT,
entity_description=MatterEventEntityDescription(
entity_description=EventEntityDescription(
key="GenericSwitch",
device_class=EventDeviceClass.BUTTON,
translation_key="button",

@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, Any

from chip.clusters import Objects as clusters
@@ -19,7 +18,7 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -53,11 +52,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.FAN, async_add_entities)

@dataclass(frozen=True)
class MatterFanEntityDescription(FanEntityDescription, MatterEntityDescription):
"""Describe Matter Fan entities."""

class MatterFan(MatterEntity, FanEntity):
"""Representation of a Matter fan."""

@@ -314,7 +308,7 @@ class MatterFan(MatterEntity, FanEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.FAN,
entity_description=MatterFanEntityDescription(
entity_description=FanEntityDescription(
key="MatterFan",
name=None,
),
@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

from chip.clusters import Objects as clusters
@@ -30,7 +29,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import color as color_util

from .const import LOGGER
from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema
from .util import (
@@ -86,15 +85,10 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.LIGHT, async_add_entities)

@dataclass(frozen=True)
class MatterLightEntityDescription(LightEntityDescription, MatterEntityDescription):
"""Describe Matter Light entities."""

class MatterLight(MatterEntity, LightEntity):
"""Representation of a Matter light."""

entity_description: MatterLightEntityDescription
entity_description: LightEntityDescription
_supports_brightness = False
_supports_color = False
_supports_color_temperature = False
@@ -464,7 +458,7 @@ class MatterLight(MatterEntity, LightEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.LIGHT,
entity_description=MatterLightEntityDescription(
entity_description=LightEntityDescription(
key="MatterLight",
name=None,
),
@@ -493,7 +487,7 @@ DISCOVERY_SCHEMAS = [
# Additional schema to match (HS Color) lights with incorrect/missing device type
MatterDiscoverySchema(
platform=Platform.LIGHT,
entity_description=MatterLightEntityDescription(
entity_description=LightEntityDescription(
key="MatterHSColorLightFallback",
name=None,
),
@@ -514,7 +508,7 @@ DISCOVERY_SCHEMAS = [
# Additional schema to match (XY Color) lights with incorrect/missing device type
MatterDiscoverySchema(
platform=Platform.LIGHT,
entity_description=MatterLightEntityDescription(
entity_description=LightEntityDescription(
key="MatterXYColorLightFallback",
name=None,
),
@@ -535,7 +529,7 @@ DISCOVERY_SCHEMAS = [
# Additional schema to match (color temperature) lights with incorrect/missing device type
MatterDiscoverySchema(
platform=Platform.LIGHT,
entity_description=MatterLightEntityDescription(
entity_description=LightEntityDescription(
key="MatterColorTemperatureLightFallback",
name=None,
),
@@ -3,7 +3,6 @@
from __future__ import annotations

import asyncio
from dataclasses import dataclass
from typing import Any

from chip.clusters import Objects as clusters
@@ -20,7 +19,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import LOGGER
from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -53,11 +52,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.LOCK, async_add_entities)

@dataclass(frozen=True)
class MatterLockEntityDescription(LockEntityDescription, MatterEntityDescription):
"""Describe Matter Lock entities."""

class MatterLock(MatterEntity, LockEntity):
"""Representation of a Matter lock."""

@@ -260,7 +254,7 @@ class MatterLock(MatterEntity, LockEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.LOCK,
entity_description=MatterLockEntityDescription(
entity_description=LockEntityDescription(
key="MatterLock",
name=None,
),

@@ -11,8 +11,7 @@ from matter_server.client.models.device_types import DeviceType
from matter_server.client.models.node import MatterEndpoint

from homeassistant.const import Platform

from .entity import MatterEntityDescription
from homeassistant.helpers.entity import EntityDescription

type SensorValueTypes = type[
clusters.uint | int | clusters.Nullable | clusters.float32 | float
@@ -55,7 +54,7 @@ class MatterEntityInfo:
attributes_to_watch: list[type[ClusterAttributeDescriptor]]

# the entity description to use
entity_description: MatterEntityDescription
entity_description: EntityDescription

# entity class to use to instantiate the entity
entity_class: type
@@ -81,7 +80,7 @@ class MatterDiscoverySchema:
platform: Platform

# platform-specific entity description
entity_description: MatterEntityDescription
entity_description: EntityDescription

# entity class to use to instantiate the entity
entity_class: type
@@ -42,11 +42,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.SWITCH, async_add_entities)

@dataclass(frozen=True)
class MatterSwitchEntityDescription(SwitchEntityDescription, MatterEntityDescription):
"""Describe Matter Switch entities."""

class MatterSwitch(MatterEntity, SwitchEntity):
"""Representation of a Matter switch."""

@@ -173,7 +168,7 @@ class MatterNumericSwitch(MatterSwitch):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.SWITCH,
entity_description=MatterSwitchEntityDescription(
entity_description=SwitchEntityDescription(
key="MatterPlug",
device_class=SwitchDeviceClass.OUTLET,
name=None,
@@ -184,7 +179,7 @@ DISCOVERY_SCHEMAS = [
),
MatterDiscoverySchema(
platform=Platform.SWITCH,
entity_description=MatterSwitchEntityDescription(
entity_description=SwitchEntityDescription(
key="MatterPowerToggle",
device_class=SwitchDeviceClass.SWITCH,
translation_key="power",
@@ -212,7 +207,7 @@ DISCOVERY_SCHEMAS = [
),
MatterDiscoverySchema(
platform=Platform.SWITCH,
entity_description=MatterSwitchEntityDescription(
entity_description=SwitchEntityDescription(
key="MatterSwitch",
device_class=SwitchDeviceClass.OUTLET,
name=None,

@@ -25,7 +25,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.restore_state import ExtraStoredData

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -67,11 +67,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.UPDATE, async_add_entities)

@dataclass(frozen=True)
class MatterUpdateEntityDescription(UpdateEntityDescription, MatterEntityDescription):
"""Describe Matter Update entities."""

class MatterUpdate(MatterEntity, UpdateEntity):
"""Representation of a Matter node capable of updating."""

@@ -255,7 +250,7 @@ class MatterUpdate(MatterEntity, UpdateEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.UPDATE,
entity_description=MatterUpdateEntityDescription(
entity_description=UpdateEntityDescription(
key="MatterUpdate", device_class=UpdateDeviceClass.FIRMWARE
),
entity_class=MatterUpdate,

@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum
from typing import TYPE_CHECKING, Any

@@ -21,7 +20,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -59,13 +58,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.VACUUM, async_add_entities)

@dataclass(frozen=True)
class MatterStateVacuumEntityDescription(
StateVacuumEntityDescription, MatterEntityDescription
):
"""Describe Matter Vacuum entities."""

class MatterVacuum(MatterEntity, StateVacuumEntity):
"""Representation of a Matter Vacuum cleaner entity."""

@@ -73,7 +65,7 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
_supported_run_modes: (
dict[int, clusters.RvcRunMode.Structs.ModeOptionStruct] | None
) = None
entity_description: MatterStateVacuumEntityDescription
entity_description: StateVacuumEntityDescription
_platform_translation_key = "vacuum"

def _get_run_mode_by_tag(
@@ -220,7 +212,7 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.VACUUM,
entity_description=MatterStateVacuumEntityDescription(
entity_description=StateVacuumEntityDescription(
key="MatterVacuumCleaner", name=None
),
entity_class=MatterVacuum,
@@ -2,8 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass

from chip.clusters import Objects as clusters
from matter_server.client.models import device_types

@@ -18,7 +16,7 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -36,16 +34,11 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.VALVE, async_add_entities)

@dataclass(frozen=True)
class MatterValveEntityDescription(ValveEntityDescription, MatterEntityDescription):
"""Describe Matter Valve entities."""

class MatterValve(MatterEntity, ValveEntity):
"""Representation of a Matter Valve."""

_feature_map: int | None = None
entity_description: MatterValveEntityDescription
entity_description: ValveEntityDescription
_platform_translation_key = "valve"

async def async_open_valve(self) -> None:
@@ -135,7 +128,7 @@ class MatterValve(MatterEntity, ValveEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.VALVE,
entity_description=MatterValveEntityDescription(
entity_description=ValveEntityDescription(
key="MatterValve",
device_class=ValveDeviceClass.WATER,
name=None,

@@ -2,7 +2,6 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, cast

from chip.clusters import Objects as clusters
@@ -27,7 +26,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .entity import MatterEntity, MatterEntityDescription
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

@@ -51,13 +50,6 @@ async def async_setup_entry(
matter.register_platform_handler(Platform.WATER_HEATER, async_add_entities)

@dataclass(frozen=True)
class MatterWaterHeaterEntityDescription(
WaterHeaterEntityDescription, MatterEntityDescription
):
"""Describe Matter Water Heater entities."""

class MatterWaterHeater(MatterEntity, WaterHeaterEntity):
"""Representation of a Matter WaterHeater entity."""

@@ -179,7 +171,7 @@ class MatterWaterHeater(MatterEntity, WaterHeaterEntity):
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.WATER_HEATER,
entity_description=MatterWaterHeaterEntityDescription(
entity_description=WaterHeaterEntityDescription(
key="MatterWaterHeater",
name=None,
),
@@ -458,7 +458,6 @@ SUBENTRY_PLATFORMS = [
Platform.LOCK,
Platform.NOTIFY,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
]
@@ -1142,7 +1141,6 @@ ENTITY_CONFIG_VALIDATOR: dict[
Platform.LOCK.value: None,
Platform.NOTIFY.value: None,
Platform.NUMBER.value: validate_number_platform_config,
Platform.SELECT: None,
Platform.SENSOR.value: validate_sensor_platform_config,
Platform.SWITCH.value: None,
}
@@ -1369,7 +1367,6 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
custom_filtering=True,
),
},
Platform.SELECT.value: {},
Platform.SENSOR.value: {
CONF_DEVICE_CLASS: PlatformField(
selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False
@@ -3106,34 +3103,6 @@ PLATFORM_MQTT_FIELDS: dict[str, dict[str, PlatformField]] = {
),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.SELECT.value: {
CONF_COMMAND_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=True,
validator=valid_publish_topic,
error="invalid_publish_topic",
),
CONF_COMMAND_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=valid_subscribe_topic,
error="invalid_subscribe_topic",
),
CONF_VALUE_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_OPTIONS: PlatformField(selector=OPTIONS_SELECTOR, required=True),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.SENSOR.value: {
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
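The Platform.SELECT block above introduces the fields an MQTT select subentry needs. As a rough sketch only, the dict below shows which option each field corresponds to; the topic names and options are invented examples, not taken from this change.

# Hypothetical values illustrating the field mapping; not part of the diff.
example_select_subentry = {
    "command_topic": "home/pool/mode/set",   # CONF_COMMAND_TOPIC, required
    "state_topic": "home/pool/mode/state",   # CONF_STATE_TOPIC, optional
    "options": ["eco", "boost", "away"],     # CONF_OPTIONS, required
    "retain": False,                         # CONF_RETAIN, optional
}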
@@ -302,6 +302,7 @@ DEFAULT_PAYLOAD_RESET = "None"
DEFAULT_PAYLOAD_STOP = "STOP"
DEFAULT_PAYLOAD_TRIGGER = "TRIGGER"
DEFAULT_PAYLOAD_UNLOCK = "UNLOCK"
DEFAULT_PORT = 1883
DEFAULT_RETAIN = False
DEFAULT_TILT_CLOSED_POSITION = 0
DEFAULT_TILT_MAX = 100
@@ -312,6 +313,7 @@ DEFAULT_WS_HEADERS: dict[str, str] = {}
DEFAULT_WS_PATH = "/"
DEFAULT_POSITION_CLOSED = 0
DEFAULT_POSITION_OPEN = 100
DEFAULT_RETAIN = False
DEFAULT_SPEED_RANGE_MAX = 100
DEFAULT_SPEED_RANGE_MIN = 1
DEFAULT_STATE_LOCKED = "LOCKED"
@@ -346,7 +346,6 @@
"mode_state_template": "Operation mode value template",
"on_command_type": "ON command type",
"optimistic": "Optimistic",
"options": "Set options",
"payload_off": "Payload \"off\"",
"payload_on": "Payload \"on\"",
"payload_press": "Payload \"press\"",
@@ -394,7 +393,6 @@
"mode_state_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the operation mode state. [Learn more.]({url}#mode_state_template)",
"on_command_type": "Defines when the payload \"on\" is sent. Using \"Last\" (the default) will send any style (brightness, color, etc) topics first and then a payload \"on\" to the command topic. Using \"First\" will send the payload \"on\" and then any style topics. Using \"Brightness\" will only send brightness commands instead of the payload \"on\" to turn the light on.",
"optimistic": "Flag that defines if the {platform} entity works in optimistic mode. [Learn more.]({url}#optimistic)",
"options": "List of options that can be selected.",
"payload_off": "The payload that represents the \"off\" state.",
"payload_on": "The payload that represents the \"on\" state.",
"payload_press": "The payload to send when the button is triggered.",
@@ -1336,7 +1334,6 @@
"lock": "[%key:component::lock::title%]",
"notify": "[%key:component::notify::title%]",
"number": "[%key:component::number::title%]",
"select": "[%key:component::select::title%]",
"sensor": "[%key:component::sensor::title%]",
"switch": "[%key:component::switch::title%]"
}
@@ -73,13 +73,6 @@ STEP_MODBUS_DATA_SCHEMA = vol.Schema(
)


STEP_MODBUS_PLACEHOLDERS = {
"tcp": "tcp://[HOST]:[PORT]",
"serial": "serial://[LOCAL DEVICE]",
"rfc2217": "rfc2217://[HOST]:[PORT]",
}


class FieldError(Exception):
"""Field with invalid data."""

@@ -190,9 +183,7 @@ class NibeHeatPumpConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the modbus step."""
if user_input is None:
return self.async_show_form(
step_id="modbus",
data_schema=STEP_MODBUS_DATA_SCHEMA,
description_placeholders=STEP_MODBUS_PLACEHOLDERS,
step_id="modbus", data_schema=STEP_MODBUS_DATA_SCHEMA
)

errors = {}
@@ -209,10 +200,7 @@ class NibeHeatPumpConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title=title, data=data)

return self.async_show_form(
step_id="modbus",
data_schema=STEP_MODBUS_DATA_SCHEMA,
errors=errors,
description_placeholders=STEP_MODBUS_PLACEHOLDERS,
step_id="modbus", data_schema=STEP_MODBUS_DATA_SCHEMA, errors=errors
)

async def async_step_nibegw(
@@ -15,7 +15,7 @@
"modbus_unit": "Modbus unit identifier"
},
"data_description": {
"modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `{tcp}` for Modbus TCP connection\n - `{serial}` for a local Modbus RTU connection\n - `{rfc2217}` for a remote Telnet-based Modbus RTU connection.",
"modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote Telnet-based Modbus RTU connection.",
"modbus_unit": "Unit identification for your heat pump. Can usually be left at 0."
}
},
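The modbus_url description above lists the three accepted URL forms that the STEP_MODBUS_PLACEHOLDERS keys stand in for. A small sketch with made-up host, port, and device values, purely for illustration:

# Example values only; real hosts, ports, and serial devices will differ.
modbus_url_examples = {
    "tcp": "tcp://192.168.1.50:502",            # Modbus TCP
    "serial": "serial:///dev/ttyUSB0",          # local Modbus RTU
    "rfc2217": "rfc2217://192.168.1.50:4001",   # remote Telnet-based Modbus RTU
}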
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/niko_home_control",
"iot_class": "local_push",
"loggers": ["nikohomecontrol"],
"requirements": ["nhc==0.7.0"]
"requirements": ["nhc==0.6.1"]
}
@@ -1,4 +1,4 @@
"""The Nintendo Switch parental controls integration."""
"""The Nintendo Switch Parental Controls integration."""

from __future__ import annotations

@@ -10,19 +10,19 @@ from pynintendoparental.exceptions import (

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_SESSION_TOKEN, DOMAIN
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .coordinator import NintendoParentalConfigEntry, NintendoUpdateCoordinator

_PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.TIME]
_PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(
hass: HomeAssistant, entry: NintendoParentalControlsConfigEntry
hass: HomeAssistant, entry: NintendoParentalConfigEntry
) -> bool:
"""Set up Nintendo Switch parental controls from a config entry."""
"""Set up Nintendo Switch Parental Controls from a config entry."""
try:
nintendo_auth = await Authenticator.complete_login(
auth=None,
@@ -31,7 +31,7 @@ async def async_setup_entry(
client_session=async_get_clientsession(hass),
)
except (InvalidSessionTokenException, InvalidOAuthConfigurationException) as err:
raise ConfigEntryAuthFailed(
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="auth_expired",
) from err
@@ -45,7 +45,7 @@ async def async_setup_entry(


async def async_unload_entry(
hass: HomeAssistant, entry: NintendoParentalControlsConfigEntry
hass: HomeAssistant, entry: NintendoParentalConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
@@ -1,8 +1,7 @@
"""Config flow for the Nintendo Switch parental controls integration."""
"""Config flow for the Nintendo Switch Parental Controls integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import TYPE_CHECKING, Any

@@ -20,7 +19,7 @@ _LOGGER = logging.getLogger(__name__)


class NintendoConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Nintendo Switch parental controls."""
"""Handle a config flow for Nintendo Switch Parental Controls."""

def __init__(self) -> None:
"""Initialize a new config flow instance."""
@@ -60,41 +59,3 @@ class NintendoConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
errors=errors,
)

async def async_step_reauth(
self, user_input: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauthentication on an API error."""
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if self.auth is None:
self.auth = Authenticator.generate_login(
client_session=async_get_clientsession(self.hass)
)
if user_input is not None:
try:
await self.auth.complete_login(
self.auth, user_input[CONF_API_TOKEN], False
)
except (ValueError, InvalidSessionTokenException, HttpException):
errors["base"] = "invalid_auth"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data={
**reauth_entry.data,
CONF_SESSION_TOKEN: self.auth.get_session_token,
},
)
return self.async_show_form(
step_id="reauth_confirm",
description_placeholders={"link": self.auth.login_url},
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
errors=errors,
)
5
homeassistant/components/nintendo_parental/const.py
Normal file
@@ -0,0 +1,5 @@
"""Constants for the Nintendo Switch Parental Controls integration."""

DOMAIN = "nintendo_parental"
CONF_UPDATE_INTERVAL = "update_interval"
CONF_SESSION_TOKEN = "session_token"
@@ -1,4 +1,4 @@
"""Nintendo parental controls data coordinator."""
"""Nintendo Parental Controls data coordinator."""

from __future__ import annotations

@@ -15,7 +15,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

type NintendoParentalControlsConfigEntry = ConfigEntry[NintendoUpdateCoordinator]
type NintendoParentalConfigEntry = ConfigEntry[NintendoUpdateCoordinator]

_LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = timedelta(seconds=60)
@@ -28,7 +28,7 @@ class NintendoUpdateCoordinator(DataUpdateCoordinator[None]):
self,
hass: HomeAssistant,
authenticator: Authenticator,
config_entry: NintendoParentalControlsConfigEntry,
config_entry: NintendoParentalConfigEntry,
) -> None:
"""Initialize update coordinator."""
super().__init__(
@@ -1,4 +1,4 @@
"""Base entity definition for Nintendo parental controls."""
"""Base entity definition for Nintendo Parental."""

from __future__ import annotations
@@ -1,9 +1,9 @@
{
"domain": "nintendo_parental_controls",
"name": "Nintendo Switch parental controls",
"domain": "nintendo_parental",
"name": "Nintendo Switch Parental Controls",
"codeowners": ["@pantherale0"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nintendo_parental_controls",
"documentation": "https://www.home-assistant.io/integrations/nintendo_parental",
"iot_class": "cloud_polling",
"loggers": ["pynintendoparental"],
"quality_scale": "bronze",
@@ -1,4 +1,4 @@
"""Sensor platform for Nintendo parental controls."""
"""Sensor platform for Nintendo Parental."""

from __future__ import annotations

@@ -16,39 +16,39 @@ from homeassistant.const import UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .coordinator import NintendoParentalConfigEntry, NintendoUpdateCoordinator
from .entity import Device, NintendoDevice

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


class NintendoParentalControlsSensor(StrEnum):
"""Store keys for Nintendo parental controls sensors."""
class NintendoParentalSensor(StrEnum):
"""Store keys for Nintendo Parental sensors."""

PLAYING_TIME = "playing_time"
TIME_REMAINING = "time_remaining"


@dataclass(kw_only=True, frozen=True)
class NintendoParentalControlsSensorEntityDescription(SensorEntityDescription):
"""Description for Nintendo parental controls sensor entities."""
class NintendoParentalSensorEntityDescription(SensorEntityDescription):
"""Description for Nintendo Parental sensor entities."""

value_fn: Callable[[Device], int | float | None]


SENSOR_DESCRIPTIONS: tuple[NintendoParentalControlsSensorEntityDescription, ...] = (
NintendoParentalControlsSensorEntityDescription(
key=NintendoParentalControlsSensor.PLAYING_TIME,
translation_key=NintendoParentalControlsSensor.PLAYING_TIME,
SENSOR_DESCRIPTIONS: tuple[NintendoParentalSensorEntityDescription, ...] = (
NintendoParentalSensorEntityDescription(
key=NintendoParentalSensor.PLAYING_TIME,
translation_key=NintendoParentalSensor.PLAYING_TIME,
native_unit_of_measurement=UnitOfTime.MINUTES,
device_class=SensorDeviceClass.DURATION,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda device: device.today_playing_time,
),
NintendoParentalControlsSensorEntityDescription(
key=NintendoParentalControlsSensor.TIME_REMAINING,
translation_key=NintendoParentalControlsSensor.TIME_REMAINING,
NintendoParentalSensorEntityDescription(
key=NintendoParentalSensor.TIME_REMAINING,
translation_key=NintendoParentalSensor.TIME_REMAINING,
native_unit_of_measurement=UnitOfTime.MINUTES,
device_class=SensorDeviceClass.DURATION,
state_class=SensorStateClass.MEASUREMENT,
@@ -59,27 +59,27 @@ SENSOR_DESCRIPTIONS: tuple[NintendoParentalControlsSensorEntityDescription, ...]

async def async_setup_entry(
hass: HomeAssistant,
entry: NintendoParentalControlsConfigEntry,
entry: NintendoParentalConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
async_add_devices(
NintendoParentalControlsSensorEntity(entry.runtime_data, device, sensor)
NintendoParentalSensorEntity(entry.runtime_data, device, sensor)
for device in entry.runtime_data.api.devices.values()
for sensor in SENSOR_DESCRIPTIONS
)


class NintendoParentalControlsSensorEntity(NintendoDevice, SensorEntity):
class NintendoParentalSensorEntity(NintendoDevice, SensorEntity):
"""Represent a single sensor."""

entity_description: NintendoParentalControlsSensorEntityDescription
entity_description: NintendoParentalSensorEntityDescription

def __init__(
self,
coordinator: NintendoUpdateCoordinator,
device: Device,
description: NintendoParentalControlsSensorEntityDescription,
description: NintendoParentalSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator=coordinator, device=device, key=description.key)
@@ -9,15 +9,6 @@
"data_description": {
"api_token": "The link copied from the Nintendo website"
}
},
"reauth_confirm": {
"description": "To obtain your access token, click [Nintendo Login]({link}) to sign in to your Nintendo account. Then, for the account you want to link, right-click on the red **Select this person** button and choose **Copy Link Address**.",
"data": {
"api_token": "Access token"
},
"data_description": {
"api_token": "The link copied from the Nintendo website"
}
}
},
"error": {
@@ -26,8 +17,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
}
},
"entity": {
@@ -38,19 +28,11 @@
"time_remaining": {
"name": "Screen time remaining"
}
},
"time": {
"bedtime_alarm": {
"name": "Bedtime alarm"
}
}
},
"exceptions": {
"auth_expired": {
"message": "Authentication token expired."
},
"bedtime_alarm_out_of_range": {
"message": "{value} not accepted. Bedtime Alarm must be between {bedtime_alarm_min} and {bedtime_alarm_max}. To disable, set to {bedtime_alarm_disable}."
"message": "Authentication expired. Please remove and re-add the integration to reconnect."
}
}
}
@@ -1,9 +0,0 @@
"""Constants for the Nintendo Switch parental controls integration."""

DOMAIN = "nintendo_parental_controls"
CONF_UPDATE_INTERVAL = "update_interval"
CONF_SESSION_TOKEN = "session_token"

BEDTIME_ALARM_MIN = "16:00"
BEDTIME_ALARM_MAX = "23:00"
BEDTIME_ALARM_DISABLE = "00:00"
@@ -1,100 +0,0 @@
"""Time platform for Nintendo parental controls."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from datetime import time
from enum import StrEnum
import logging
from typing import Any

from pynintendoparental.exceptions import BedtimeOutOfRangeError

from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import BEDTIME_ALARM_DISABLE, BEDTIME_ALARM_MAX, BEDTIME_ALARM_MIN, DOMAIN
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .entity import Device, NintendoDevice

_LOGGER = logging.getLogger(__name__)

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


class NintendoParentalControlsTime(StrEnum):
"""Store keys for Nintendo Parental time."""

BEDTIME_ALARM = "bedtime_alarm"


@dataclass(kw_only=True, frozen=True)
class NintendoParentalControlsTimeEntityDescription(TimeEntityDescription):
"""Description for Nintendo Parental time entities."""

value_fn: Callable[[Device], time | None]
set_value_fn: Callable[[Device, time], Coroutine[Any, Any, None]]


TIME_DESCRIPTIONS: tuple[NintendoParentalControlsTimeEntityDescription, ...] = (
NintendoParentalControlsTimeEntityDescription(
key=NintendoParentalControlsTime.BEDTIME_ALARM,
translation_key=NintendoParentalControlsTime.BEDTIME_ALARM,
value_fn=lambda device: device.bedtime_alarm,
set_value_fn=lambda device, value: device.set_bedtime_alarm(value=value),
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: NintendoParentalControlsConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the time platform."""
async_add_devices(
NintendoParentalControlsTimeEntity(entry.runtime_data, device, entity)
for device in entry.runtime_data.api.devices.values()
for entity in TIME_DESCRIPTIONS
)


class NintendoParentalControlsTimeEntity(NintendoDevice, TimeEntity):
"""Represent a single time entity."""

entity_description: NintendoParentalControlsTimeEntityDescription

def __init__(
self,
coordinator: NintendoUpdateCoordinator,
device: Device,
description: NintendoParentalControlsTimeEntityDescription,
) -> None:
"""Initialize the time entity."""
super().__init__(coordinator=coordinator, device=device, key=description.key)
self.entity_description = description

@property
def native_value(self) -> time | None:
"""Return the time."""
return self.entity_description.value_fn(self._device)

async def async_set_value(self, value: time) -> None:
"""Update the value."""
try:
await self.entity_description.set_value_fn(self._device, value)
except BedtimeOutOfRangeError as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="bedtime_alarm_out_of_range",
translation_placeholders={
"value": value.strftime("%H:%M"),
"bedtime_alarm_max": BEDTIME_ALARM_MAX,
"bedtime_alarm_min": BEDTIME_ALARM_MIN,
"bedtime_alarm_disable": BEDTIME_ALARM_DISABLE,
},
) from exc
@@ -124,7 +124,7 @@ class NumberDeviceClass(StrEnum):
CO = "carbon_monoxide"
"""Carbon Monoxide gas concentration.

Unit of measurement: `ppm` (parts per million), `mg/m³`
Unit of measurement: `ppm` (parts per million)
"""

CO2 = "carbon_dioxide"
@@ -475,10 +475,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
NumberDeviceClass.BATTERY: {PERCENTAGE},
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
NumberDeviceClass.CO: {
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
NumberDeviceClass.CURRENT: set(UnitOfElectricCurrent),
@@ -40,10 +40,7 @@ class OpenRouterAITaskEntity(
"""OpenRouter AI Task entity."""

_attr_name = None
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
_attr_supported_features = ai_task.AITaskEntityFeature.GENERATE_DATA

async def _async_generate_data(
self,
@@ -2,17 +2,13 @@

from __future__ import annotations

import base64
from collections.abc import AsyncGenerator, Callable
import json
from mimetypes import guess_file_type
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal

import openai
from openai.types.chat import (
ChatCompletionAssistantMessageParam,
ChatCompletionContentPartImageParam,
ChatCompletionFunctionToolParam,
ChatCompletionMessage,
ChatCompletionMessageFunctionToolCallParam,
@@ -30,7 +26,6 @@ from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
@@ -170,43 +165,6 @@ async def _transform_response(
yield data


async def async_prepare_files_for_prompt(
hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> list[ChatCompletionContentPartImageParam]:
"""Append files to a prompt.

Caller needs to ensure that the files are allowed.
"""

def append_files_to_content() -> list[ChatCompletionContentPartImageParam]:
content: list[ChatCompletionContentPartImageParam] = []

for file_path, mime_type in files:
if not file_path.exists():
raise HomeAssistantError(f"`{file_path}` does not exist")

if mime_type is None:
mime_type = guess_file_type(file_path)[0]

if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
raise HomeAssistantError(
"Only images and PDF are supported by the OpenRouter API, "
f"`{file_path}` is not an image file or PDF"
)

base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
content.append(
{
"type": "image_url",
"image_url": {"url": f"data:{mime_type};base64,{base64_file}"},
}
)

return content

return await hass.async_add_executor_job(append_files_to_content)


class OpenRouterEntity(Entity):
"""Base entity for Open Router."""

@@ -258,24 +216,6 @@ class OpenRouterEntity(Entity):
if (m := _convert_content_to_chat_message(content))
]

last_content = chat_log.content[-1]

# Handle attachments by adding them to the last user message
if last_content.role == "user" and last_content.attachments:
last_message: ChatCompletionMessageParam = model_args["messages"][-1]
assert last_message["role"] == "user" and isinstance(
last_message["content"], str
)
# Encode files with base64 and append them to the text prompt
files = await async_prepare_files_for_prompt(
self.hass,
[(a.path, a.mime_type) for a in last_content.attachments],
)
last_message["content"] = [
{"type": "text", "text": last_message["content"]},
*files,
]

if structure:
if TYPE_CHECKING:
assert structure_name is not None
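For context, the attachment handling above turns each file into an OpenAI-style image_url content part and appends it to the last user message. A minimal sketch of the resulting message shape; the text and base64 payload here are placeholders, not values from this change:

# Sketch of the final user message after attachments are appended.
user_message = {
    "role": "user",
    "content": [
        {"type": "text", "text": "Describe the attached image"},
        {"type": "image_url", "image_url": {"url": "data:image/png;base64,<BASE64>"}},
    ],
}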
@@ -16,13 +16,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from .const import (
CONF_CHAT_MODEL,
CONF_IMAGE_MODEL,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_IMAGE_MODEL,
UNSUPPORTED_IMAGE_MODELS,
)
from .const import CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL, UNSUPPORTED_IMAGE_MODELS
from .entity import OpenAIBaseLLMEntity

if TYPE_CHECKING:
@@ -148,7 +142,7 @@ class OpenAITaskEntity(
mime_type=mime_type,
width=int(width) if width else None,
height=int(height) if height else None,
model=self.subentry.data.get(CONF_IMAGE_MODEL, RECOMMENDED_IMAGE_MODEL),
model="gpt-image-1",
revised_prompt=image_call.revised_prompt
if hasattr(image_call, "revised_prompt")
else None,
@@ -43,7 +43,6 @@ from homeassistant.helpers.typing import VolDictType
from .const import (
CONF_CHAT_MODEL,
CONF_CODE_INTERPRETER,
CONF_IMAGE_MODEL,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_REASONING_EFFORT,
@@ -65,7 +64,6 @@ from .const import (
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_CODE_INTERPRETER,
RECOMMENDED_CONVERSATION_OPTIONS,
RECOMMENDED_IMAGE_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_REASONING_EFFORT,
RECOMMENDED_TEMPERATURE,
@@ -74,7 +72,6 @@ from .const import (
RECOMMENDED_WEB_SEARCH,
RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE,
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
UNSUPPORTED_IMAGE_MODELS,
UNSUPPORTED_MODELS,
UNSUPPORTED_WEB_SEARCH_MODELS,
)
@@ -414,18 +411,6 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
)
}

if self._subentry_type == "ai_task_data" and not model.startswith(
tuple(UNSUPPORTED_IMAGE_MODELS)
):
step_schema[
vol.Optional(CONF_IMAGE_MODEL, default=RECOMMENDED_IMAGE_MODEL)
] = SelectSelector(
SelectSelectorConfig(
options=["gpt-image-1", "gpt-image-1-mini"],
mode=SelectSelectorMode.DROPDOWN,
)
)

if user_input is not None:
if user_input.get(CONF_WEB_SEARCH):
if user_input.get(CONF_WEB_SEARCH_USER_LOCATION):
@@ -13,7 +13,6 @@ DEFAULT_AI_TASK_NAME = "OpenAI AI Task"
DEFAULT_NAME = "OpenAI Conversation"

CONF_CHAT_MODEL = "chat_model"
CONF_IMAGE_MODEL = "image_model"
CONF_CODE_INTERPRETER = "code_interpreter"
CONF_FILENAMES = "filenames"
CONF_MAX_TOKENS = "max_tokens"
@@ -32,7 +31,6 @@ CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"
RECOMMENDED_CODE_INTERPRETER = False
RECOMMENDED_CHAT_MODEL = "gpt-4o-mini"
RECOMMENDED_IMAGE_MODEL = "gpt-image-1"
RECOMMENDED_MAX_TOKENS = 3000
RECOMMENDED_REASONING_EFFORT = "low"
RECOMMENDED_TEMPERATURE = 1.0
@@ -67,7 +67,6 @@ from homeassistant.util import slugify
from .const import (
CONF_CHAT_MODEL,
CONF_CODE_INTERPRETER,
CONF_IMAGE_MODEL,
CONF_MAX_TOKENS,
CONF_REASONING_EFFORT,
CONF_TEMPERATURE,
@@ -83,7 +82,6 @@ from .const import (
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_IMAGE_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_REASONING_EFFORT,
RECOMMENDED_TEMPERATURE,
@@ -518,15 +516,13 @@ class OpenAIBaseLLMEntity(Entity):
model_args.setdefault("include", []).append("code_interpreter_call.outputs") # type: ignore[union-attr]

if force_image:
image_model = options.get(CONF_IMAGE_MODEL, RECOMMENDED_IMAGE_MODEL)
image_tool = ImageGeneration(
type="image_generation",
model=image_model,
output_format="png",
tools.append(
ImageGeneration(
type="image_generation",
input_fidelity="high",
output_format="png",
)
)
if image_model == "gpt-image-1":
image_tool["input_fidelity"] = "high"
tools.append(image_tool)
model_args["tool_choice"] = ToolChoiceTypesParam(type="image_generation")
model_args["store"] = True # Avoid sending image data back and forth
@@ -50,7 +50,6 @@
"data": {
"code_interpreter": "Enable code interpreter tool",
"reasoning_effort": "Reasoning effort",
"image_model": "Image generation model",
"web_search": "Enable web search",
"search_context_size": "Search context size",
"user_location": "Include home location"
@@ -58,7 +57,6 @@
"data_description": {
"code_interpreter": "This tool, also known as the python tool to the model, allows it to run code to answer questions",
"reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt",
"image_model": "The model to use when generating images",
"web_search": "Allow the model to search the web for the latest information before generating a response",
"search_context_size": "High level guidance for the amount of context window space to use for the search",
"user_location": "Refine search results based on geography"
@@ -99,14 +97,12 @@
"title": "[%key:component::openai_conversation::config_subentries::conversation::step::model::title%]",
"data": {
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::reasoning_effort%]",
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::image_model%]",
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::web_search%]",
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::search_context_size%]",
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::user_location%]"
},
"data_description": {
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::reasoning_effort%]",
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::image_model%]",
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::web_search%]",
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::search_context_size%]",
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::user_location%]"
@@ -14,7 +14,7 @@
},
"abort": {
"not_supported": "Device not supported",
"no_devices_found": "No devices found",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
@@ -75,9 +75,6 @@ async def _title(hass: HomeAssistant, discovery_info: HassioServiceInfo) -> str:
if device and ("Connect_ZBT-1" in device or "SkyConnect" in device):
return f"Home Assistant Connect ZBT-1 ({discovery_info.name})"

if device and "Nabu_Casa_ZBT-2" in device:
return f"Home Assistant Connect ZBT-2 ({discovery_info.name})"

return discovery_info.name
@@ -210,9 +210,7 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the local authentication step via config flow."""
errors = {}
description_placeholders = {
"somfy-developer-mode-docs": "https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started"
}
description_placeholders = {}

if user_input:
self._host = user_input[CONF_HOST]
@@ -13,7 +13,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
"requirements": ["pyoverkiz==1.19.0"],
"requirements": ["pyoverkiz==1.17.2"],
"zeroconf": [
{
"type": "_kizbox._tcp.local.",
@@ -32,7 +32,7 @@
}
},
"local": {
"description": "By activating the [Developer Mode of your TaHoma box]({somfy-developer-mode-docs}), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway.",
"description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway.",
"data": {
"host": "[%key:common::config_flow::data::host%]",
"token": "[%key:common::config_flow::data::api_token%]",
@@ -8,6 +8,6 @@
"iot_class": "local_polling",
"loggers": ["plugwise"],
"quality_scale": "platinum",
"requirements": ["plugwise==1.8.0"],
"requirements": ["plugwise==1.7.8"],
"zeroconf": ["_plugwise._tcp.local."]
}
@@ -7,13 +7,11 @@ from typing import Any

from pooldose.client import PooldoseClient
from pooldose.request_status import RequestStatus
from pooldose.type_definitions import APIVersionResponse
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_MAC
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo

from .const import DOMAIN
@@ -40,9 +38,9 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):

async def _validate_host(
self, host: str
) -> tuple[str | None, APIVersionResponse | None, dict[str, str] | None]:
) -> tuple[str | None, dict[str, str] | None, dict[str, str] | None]:
"""Validate the host and return (serial_number, api_versions, errors)."""
client = PooldoseClient(host, websession=async_get_clientsession(self.hass))
client = PooldoseClient(host)
client_status = await client.connect()
if client_status == RequestStatus.HOST_UNREACHABLE:
return None, None, {"base": "cannot_connect"}
@@ -126,14 +124,7 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should") or "",
}
if api_versions
else None,
description_placeholders=api_versions,
)

await self.async_set_unique_id(serial_number, raise_on_progress=False)
@@ -4,10 +4,10 @@ from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from pooldose.client import PooldoseClient
from pooldose.request_status import RequestStatus
from pooldose.type_definitions import DeviceInfoDict, StructuredValuesDict

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -18,10 +18,10 @@ _LOGGER = logging.getLogger(__name__)
type PooldoseConfigEntry = ConfigEntry[PooldoseCoordinator]


class PooldoseCoordinator(DataUpdateCoordinator[StructuredValuesDict]):
class PooldoseCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Coordinator for PoolDose integration."""

device_info: DeviceInfoDict
device_info: dict[str, Any]
config_entry: PooldoseConfigEntry

def __init__(
@@ -46,7 +46,7 @@ class PooldoseCoordinator(DataUpdateCoordinator[StructuredValuesDict]):
self.device_info = self.client.device_info
_LOGGER.debug("Device info: %s", self.device_info)

async def _async_update_data(self) -> StructuredValuesDict:
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from the PoolDose API."""
try:
status, instant_values = await self.client.instant_values_structured()
@@ -62,7 +62,7 @@ class PooldoseCoordinator(DataUpdateCoordinator[StructuredValuesDict]):
if status != RequestStatus.SUCCESS:
raise UpdateFailed(f"API returned status: {status}")

if not instant_values:
if instant_values is None:
raise UpdateFailed("No data received from API")

_LOGGER.debug("Instant values structured: %s", instant_values)
@@ -2,9 +2,7 @@

from __future__ import annotations

from typing import Literal

from pooldose.type_definitions import DeviceInfoDict, ValueDict
from typing import Any

from homeassistant.const import CONF_MAC
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
@@ -16,13 +14,13 @@ from .coordinator import PooldoseCoordinator


def device_info(
info: DeviceInfoDict | None, unique_id: str, mac: str | None = None
info: dict | None, unique_id: str, mac: str | None = None
) -> DeviceInfo:
"""Create device info for PoolDose devices."""
if info is None:
info = {}

api_version = (info.get("API_VERSION") or "").removesuffix("/")
api_version = info.get("API_VERSION", "").removesuffix("/")

return DeviceInfo(
identifiers={(DOMAIN, unique_id)},
@@ -53,9 +51,9 @@ class PooldoseEntity(CoordinatorEntity[PooldoseCoordinator]):
self,
coordinator: PooldoseCoordinator,
serial_number: str,
device_properties: DeviceInfoDict,
device_properties: dict[str, Any],
entity_description: EntityDescription,
platform_name: Literal["sensor", "switch", "number", "binary_sensor", "select"],
platform_name: str,
) -> None:
"""Initialize PoolDose entity."""
super().__init__(coordinator)
@@ -68,7 +66,18 @@ class PooldoseEntity(CoordinatorEntity[PooldoseCoordinator]):
coordinator.config_entry.data.get(CONF_MAC),
)

def get_data(self) -> ValueDict | None:
@property
def available(self) -> bool:
"""Return True if the entity is available."""
if not super().available or self.coordinator.data is None:
return False
# Check if the entity type exists in coordinator data
platform_data = self.coordinator.data.get(self.platform_name, {})
return self.entity_description.key in platform_data

def get_data(self) -> dict | None:
"""Get data for this entity, only if available."""
platform_data = self.coordinator.data[self.platform_name]
if not self.available:
return None
platform_data = self.coordinator.data.get(self.platform_name, {})
return platform_data.get(self.entity_description.key)
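The available property and the reworked get_data above both index coordinator.data by platform name and entity key. A small sketch of the nested layout this assumes; the keys and readings below are illustrative, not values taken from the library:

# Illustrative structure; real keys come from instant_values_structured().
coordinator_data = {
    "sensor": {
        "temperature": {"value": 25.4, "unit": "°C"},
        "ph": {"value": 7.2, "unit": None},
    },
    "switch": {},
}
# available() is True only when self.platform_name / entity_description.key
# exist in this mapping; get_data() then returns the inner value dict.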
@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/pooldose",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["python-pooldose==0.7.0"]
"requirements": ["python-pooldose==0.5.0"]
}
@@ -48,7 +48,7 @@ rules:
discovery: done
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
@@ -72,5 +72,5 @@ rules:

# Platinum
async-dependency: done
inject-websession: done
inject-websession: todo
strict-typing: todo
@@ -19,6 +19,8 @@ from .entity import PooldoseEntity

_LOGGER = logging.getLogger(__name__)

PLATFORM_NAME = "sensor"

SENSOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="temperature",
@@ -144,16 +146,18 @@ async def async_setup_entry(
assert config_entry.unique_id is not None

coordinator = config_entry.runtime_data
sensor_data = coordinator.data["sensor"]
data = coordinator.data
serial_number = config_entry.unique_id

sensor_data = data.get(PLATFORM_NAME, {}) if data else {}

async_add_entities(
PooldoseSensor(
coordinator,
serial_number,
coordinator.device_info,
description,
"sensor",
PLATFORM_NAME,
)
for description in SENSOR_DESCRIPTIONS
if description.key in sensor_data
@@ -167,17 +171,16 @@ class PooldoseSensor(PooldoseEntity, SensorEntity):
def native_value(self) -> float | int | str | None:
"""Return the current value of the sensor."""
data = self.get_data()
if data is not None:
if isinstance(data, dict) and "value" in data:
return data["value"]
return None

@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement."""
if (
self.entity_description.key == "temperature"
and (data := self.get_data()) is not None
):
return data["unit"] # °C or °F
if self.entity_description.key == "temperature":
data = self.get_data()
if isinstance(data, dict) and "unit" in data and data["unit"] is not None:
return data["unit"] # °C or °F

return super().native_unit_of_measurement
Some files were not shown because too many files have changed in this diff.