Compare commits


1 commit

Author: copilot-swe-agent[bot]
SHA1: c68b0dcb7d
Message: Initial plan
Date: 2025-11-10 11:21:26 +00:00
574 changed files with 16149 additions and 43279 deletions

View File

@@ -37,7 +37,7 @@ on:
type: boolean
env:
CACHE_VERSION: 2
CACHE_VERSION: 1
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.12"
@@ -622,7 +622,7 @@ jobs:
steps:
- *checkout
- name: Dependency review
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
with:
license-check: false # We use our own license audit checks

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
category: "/language:python"

View File

@@ -231,7 +231,6 @@ homeassistant.components.google_cloud.*
homeassistant.components.google_drive.*
homeassistant.components.google_photos.*
homeassistant.components.google_sheets.*
homeassistant.components.google_weather.*
homeassistant.components.govee_ble.*
homeassistant.components.gpsd.*
homeassistant.components.greeneye_monitor.*

CODEOWNERS (generated)
View File

@@ -607,8 +607,6 @@ build.json @home-assistant/supervisor
/tests/components/google_tasks/ @allenporter
/homeassistant/components/google_travel_time/ @eifinger
/tests/components/google_travel_time/ @eifinger
/homeassistant/components/google_weather/ @tronikos
/tests/components/google_weather/ @tronikos
/homeassistant/components/govee_ble/ @bdraco
/tests/components/govee_ble/ @bdraco
/homeassistant/components/govee_light_local/ @Galorhallen
@@ -1019,8 +1017,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
/tests/components/music_assistant/ @music-assistant @arturpragacz
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@@ -1376,8 +1374,6 @@ build.json @home-assistant/supervisor
/tests/components/sanix/ @tomaszsluszniak
/homeassistant/components/satel_integra/ @Tommatheussen
/tests/components/satel_integra/ @Tommatheussen
/homeassistant/components/saunum/ @mettolen
/tests/components/saunum/ @mettolen
/homeassistant/components/scene/ @home-assistant/core
/tests/components/scene/ @home-assistant/core
/homeassistant/components/schedule/ @home-assistant/core

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*

View File

@@ -15,7 +15,6 @@
"google_tasks",
"google_translate",
"google_travel_time",
"google_weather",
"google_wifi",
"google",
"nest",

View File

@@ -16,7 +16,6 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
@@ -44,9 +43,6 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
name=entry.title,
config_entry=entry,
update_interval=timedelta(seconds=SCAN_INTERVAL),
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=30, immediate=False
),
)
self.api = AmazonEchoApi(
session,

View File

@@ -25,7 +25,7 @@ from .const import (
RECOMMENDED_CHAT_MODEL,
)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
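The PLATFORMS change above is what adds or removes the AI Task platform for the Anthropic integration. As a minimal sketch, assuming the standard Home Assistant entry-setup pattern (the setup/unload functions below are illustrative, not copied from this diff), the tuple is typically consumed like this:

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Forward the entry to every platform listed in PLATFORMS."""
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload the same platforms on teardown."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

Dropping Platform.AI_TASK from the tuple is therefore enough to stop the ai_task platform from being forwarded at setup.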

View File

@@ -1,80 +0,0 @@
"""AI Task integration for Anthropic."""
from __future__ import annotations
from json import JSONDecodeError
import logging
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads
from .entity import AnthropicBaseLLMEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""
for subentry in config_entry.subentries.values():
if subentry.subentry_type != "ai_task_data":
continue
async_add_entities(
[AnthropicTaskEntity(config_entry, subentry)],
config_subentry_id=subentry.subentry_id,
)
class AnthropicTaskEntity(
ai_task.AITaskEntity,
AnthropicBaseLLMEntity,
):
"""Anthropic AI Task entity."""
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(chat_log, task.name, task.structure)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)
text = chat_log.content[-1].content or ""
if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with Claude structured response") from err
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from functools import partial
import json
import logging
import re
from typing import Any
import anthropic
@@ -54,7 +53,6 @@ from .const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
NON_THINKING_MODELS,
@@ -76,16 +74,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
RECOMMENDED_CONVERSATION_OPTIONS = {
RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
RECOMMENDED_AI_TASK_OPTIONS = {
CONF_RECOMMENDED: True,
}
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect.
@@ -108,7 +102,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
errors = {}
if user_input is not None:
self._async_abort_entries_match(user_input)
@@ -136,16 +130,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
subentries=[
{
"subentry_type": "conversation",
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"data": RECOMMENDED_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"subentry_type": "ai_task_data",
"data": RECOMMENDED_AI_TASK_OPTIONS,
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
}
],
)
@@ -159,10 +147,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {
"conversation": ConversationSubentryFlowHandler,
"ai_task_data": ConversationSubentryFlowHandler,
}
return {"conversation": ConversationSubentryFlowHandler}
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
@@ -179,10 +164,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Add a subentry."""
if self._subentry_type == "ai_task_data":
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
else:
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
self.options = RECOMMENDED_OPTIONS.copy()
return await self.async_step_init()
async def async_step_reconfigure(
@@ -216,29 +198,23 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
errors: dict[str, str] = {}
if self._is_new:
if self._subentry_type == "ai_task_data":
default_name = DEFAULT_AI_TASK_NAME
else:
default_name = DEFAULT_CONVERSATION_NAME
step_schema[vol.Required(CONF_NAME, default=default_name)] = str
if self._subentry_type == "conversation":
step_schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
}
step_schema[vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME)] = (
str
)
step_schema[
vol.Required(
CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
)
] = bool
step_schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
vol.Required(
CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
): bool,
}
)
if user_input is not None:
if not user_input.get(CONF_LLM_HASS_API):
@@ -284,11 +260,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
vol.Optional(
CONF_CHAT_MODEL,
default=RECOMMENDED_CHAT_MODEL,
): SelectSelector(
SelectSelectorConfig(
options=await self._get_model_list(), custom_value=True
)
),
): str,
vol.Optional(
CONF_MAX_TOKENS,
default=RECOMMENDED_MAX_TOKENS,
@@ -326,14 +298,10 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
if not model.startswith(tuple(NON_THINKING_MODELS)):
step_schema[
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
] = vol.All(
NumberSelector(
NumberSelectorConfig(
min=0,
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
)
),
vol.Coerce(int),
] = NumberSelector(
NumberSelectorConfig(
min=0, max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS)
)
)
else:
self.options.pop(CONF_THINKING_BUDGET, None)
@@ -399,39 +367,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
last_step=True,
)
async def _get_model_list(self) -> list[SelectOptionDict]:
"""Get list of available models."""
try:
client = await self.hass.async_add_executor_job(
partial(
anthropic.AsyncAnthropic,
api_key=self._get_entry().data[CONF_API_KEY],
)
)
models = (await client.models.list()).data
except anthropic.AnthropicError:
models = []
_LOGGER.debug("Available models: %s", models)
model_options: list[SelectOptionDict] = []
short_form = re.compile(r"[^\d]-\d$")
for model_info in models:
# Resolve alias from versioned model name:
model_alias = (
model_info.id[:-9]
if model_info.id
not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
else model_info.id
)
if short_form.search(model_alias):
model_alias += "-0"
model_options.append(
SelectOptionDict(
label=model_info.display_name,
value=model_alias,
)
)
return model_options
async def _get_location_data(self) -> dict[str, str]:
"""Get approximate location data of the user."""
location_data: dict[str, str] = {}
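The _get_model_list helper shown above derives a model alias by stripping the nine-character "-YYYYMMDD" suffix and appending "-0" when the result ends in a single version digit. A hedged, self-contained illustration of that derivation (the example model IDs are illustrative, not taken from this diff):

import re

short_form = re.compile(r"[^\d]-\d$")

def to_alias(model_id: str) -> str:
    """Mirror the alias logic from the helper above (illustrative only)."""
    if model_id in ("claude-3-haiku-20240307", "claude-3-opus-20240229"):
        return model_id  # these two are kept as full versioned IDs
    alias = model_id[:-9]  # drop the trailing "-YYYYMMDD"
    if short_form.search(alias):
        alias += "-0"  # e.g. "claude-sonnet-4" -> "claude-sonnet-4-0"
    return alias

print(to_alias("claude-sonnet-4-20250514"))   # claude-sonnet-4-0
print(to_alias("claude-3-5-haiku-20241022"))  # claude-3-5-haiku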

View File

@@ -6,7 +6,6 @@ DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)
DEFAULT_CONVERSATION_NAME = "Claude conversation"
DEFAULT_AI_TASK_NAME = "Claude AI Task"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"

View File

@@ -1,24 +1,17 @@
"""Base entity for Anthropic."""
import base64
from collections.abc import AsyncGenerator, Callable, Iterable
from dataclasses import dataclass, field
import json
from mimetypes import guess_file_type
from pathlib import Path
from typing import Any
import anthropic
from anthropic import AsyncStream
from anthropic.types import (
Base64ImageSourceParam,
Base64PDFSourceParam,
CitationsDelta,
CitationsWebSearchResultLocation,
CitationWebSearchResultLocationParam,
ContentBlockParam,
DocumentBlockParam,
ImageBlockParam,
InputJSONDelta,
MessageDeltaUsage,
MessageParam,
@@ -44,9 +37,6 @@ from anthropic.types import (
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolChoiceAnyParam,
ToolChoiceAutoParam,
ToolChoiceToolParam,
ToolParam,
ToolResultBlockParam,
ToolUnionParam,
@@ -60,16 +50,13 @@ from anthropic.types import (
WebSearchToolResultError,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from . import AnthropicConfigEntry
from .const import (
@@ -334,7 +321,6 @@ def _convert_content(
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: AsyncStream[MessageStreamEvent],
output_tool: str | None = None,
) -> AsyncGenerator[
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
@@ -395,16 +381,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
input="",
)
current_tool_args = ""
if response.content_block.name == output_tool:
if first_block or content_details.has_content():
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {"role": "assistant"}
has_native = False
first_block = False
elif isinstance(response.content_block, TextBlock):
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
first_block
@@ -495,16 +471,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
first_block = True
elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta):
if (
current_tool_block is not None
and current_tool_block["name"] == output_tool
):
content_details.citation_details[-1].length += len(
response.delta.partial_json
)
yield {"content": response.delta.partial_json}
else:
current_tool_args += response.delta.partial_json
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
content_details.citation_details[-1].length += len(response.delta.text)
yield {"content": response.delta.text}
@@ -523,9 +490,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
content_details.add_citation(response.delta.citation)
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_block is not None:
if current_tool_block["name"] == output_tool:
current_tool_block = None
continue
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_tool_block["input"] = tool_args
yield {
@@ -593,8 +557,6 @@ class AnthropicBaseLLMEntity(Entity):
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure_name: str | None = None,
structure: vol.Schema | None = None,
) -> None:
"""Generate an answer for the chat log."""
options = self.subentry.data
@@ -651,74 +613,6 @@ class AnthropicBaseLLMEntity(Entity):
}
tools.append(web_search)
# Handle attachments by adding them to the last user message
last_content = chat_log.content[-1]
if last_content.role == "user" and last_content.attachments:
last_message = messages[-1]
if last_message["role"] != "user":
raise HomeAssistantError(
"Last message must be a user message to add attachments"
)
if isinstance(last_message["content"], str):
last_message["content"] = [
TextBlockParam(type="text", text=last_message["content"])
]
last_message["content"].extend( # type: ignore[union-attr]
await async_prepare_files_for_prompt(
self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
)
)
if structure and structure_name:
structure_name = slugify(structure_name)
if model_args["thinking"]["type"] == "disabled":
if not tools:
# Simplest case: no tools and no extended thinking
# Add a tool and force its use
model_args["tool_choice"] = ToolChoiceToolParam(
type="tool",
name=structure_name,
)
else:
# Second case: tools present but no extended thinking
# Allow the model to use any tool but not text response
# The model should know to use the right tool by its description
model_args["tool_choice"] = ToolChoiceAnyParam(
type="any",
)
else:
# Extended thinking is enabled. With extended thinking, we cannot
# force tool use or disable text responses, so we add a hint to the
# system prompt instead. With extended thinking, the model should be
# smart enough to use the tool.
model_args["tool_choice"] = ToolChoiceAutoParam(
type="auto",
)
if isinstance(model_args["system"], str):
model_args["system"] = [
TextBlockParam(type="text", text=model_args["system"])
]
model_args["system"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
)
)
tools.append(
ToolParam(
name=structure_name,
description="Use this tool to reply to the user",
input_schema=convert(
structure,
custom_serializer=chat_log.llm_api.custom_serializer
if chat_log.llm_api
else llm.selector_serializer,
),
)
)
if tools:
model_args["tools"] = tools
@@ -735,11 +629,7 @@ class AnthropicBaseLLMEntity(Entity):
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(
chat_log,
stream,
output_tool=structure_name if structure else None,
),
_transform_stream(chat_log, stream),
)
]
)
@@ -751,59 +641,3 @@ class AnthropicBaseLLMEntity(Entity):
if not chat_log.unresponded_tool_results:
break
async def async_prepare_files_for_prompt(
hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
"""Append files to a prompt.
Caller needs to ensure that the files are allowed.
"""
def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
content: list[ImageBlockParam | DocumentBlockParam] = []
for file_path, mime_type in files:
if not file_path.exists():
raise HomeAssistantError(f"`{file_path}` does not exist")
if mime_type is None:
mime_type = guess_file_type(file_path)[0]
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
raise HomeAssistantError(
"Only images and PDF are supported by the Anthropic API,"
f"`{file_path}` is not an image file or PDF"
)
if mime_type == "image/jpg":
mime_type = "image/jpeg"
base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
if mime_type.startswith("image/"):
content.append(
ImageBlockParam(
type="image",
source=Base64ImageSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
elif mime_type.startswith("application/pdf"):
content.append(
DocumentBlockParam(
type="document",
source=Base64PDFSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
return content
return await hass.async_add_executor_job(append_files_to_content)

View File

@@ -18,49 +18,6 @@
}
},
"config_subentries": {
"ai_task_data": {
"abort": {
"entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "AI task",
"initiate_flow": {
"reconfigure": "Reconfigure AI task",
"user": "Add AI task"
},
"step": {
"advanced": {
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
},
"init": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
},
"model": {
"data": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
},
"data_description": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
}
}
},
"conversation": {
"abort": {
"entry_not_loaded": "Cannot add things while the configuration is disabled.",
@@ -89,8 +46,7 @@
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
},
"title": "Basic settings"
}
},
"model": {
"data": {

View File

@@ -111,6 +111,8 @@ def handle_errors_and_zip[_AsusWrtBridgeT: AsusWrtBridge](
if isinstance(data, dict):
return dict(zip(keys, list(data.values()), strict=False))
if not isinstance(data, (list, tuple)):
raise UpdateFailed("Received invalid data type")
return dict(zip(keys, data, strict=False))
return _wrapper
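For the hunk above, the practical effect of strict=False is that zip() silently pairs items up to the shorter iterable instead of raising when the bridge returns fewer values than there are keys. A tiny illustration (keys and values are made up):

keys = ["download", "upload", "total"]
data = (1234, 567)  # fewer values than keys

print(dict(zip(keys, data, strict=False)))  # {'download': 1234, 'upload': 567}

When the bridge returns a dict, the decorator zips the keys against list(data.values()) in the same way.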

View File

@@ -74,11 +74,8 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
super().__init__(data.fast_coordinator, data)
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
# Set temperature range if available, otherwise use Home Assistant defaults
if data.static.min_temp is not None and data.static.min_temp.value is not None:
self._attr_min_temp = data.static.min_temp.value
if data.static.max_temp is not None and data.static.max_temp.value is not None:
self._attr_max_temp = data.static.max_temp.value
self._attr_min_temp = data.static.min_temp.value
self._attr_max_temp = data.static.max_temp.value
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
@property

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==3.1.1"],
"requirements": ["python-bsblan==3.1.0"],
"zeroconf": [
{
"name": "bsb-lan*",

View File

@@ -55,7 +55,6 @@ from .const import (
CONF_ALIASES,
CONF_API_SERVER,
CONF_COGNITO_CLIENT_ID,
CONF_DISCOVERY_SERVICE_ACTIONS,
CONF_ENTITY_CONFIG,
CONF_FILTER,
CONF_GOOGLE_ACTIONS,
@@ -140,7 +139,6 @@ CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_MODE): vol.In([MODE_DEV]),
vol.Required(CONF_API_SERVER): str,
vol.Optional(CONF_DISCOVERY_SERVICE_ACTIONS): {str: cv.url},
}
),
_BASE_CONFIG_SCHEMA.extend(

View File

@@ -79,7 +79,6 @@ CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_API_SERVER = "api_server"
CONF_DISCOVERY_SERVICE_ACTIONS = "discovery_service_actions"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reconfiguration of the device."""
reconfigure_entry = self._get_reconfigure_entry()
if not user_input:
return self.async_show_form(
step_id="reconfigure", data_schema=STEP_RECONFIGURE
)
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
errors: dict[str, str] = {}
if user_input is not None:
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
try:
data_to_validate = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
}
await validate_input(self.hass, data_to_validate)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
data_updates = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
}
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates=data_updates
)
schema = vol.Schema(
{
vol.Required(
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
): cv.string,
vol.Required(
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
): cv.port,
vol.Optional(CONF_PIN): cv.string,
}
)
try:
await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates={CONF_HOST: updated_host}
)
return self.async_show_form(
step_id="reconfigure",
data_schema=schema,
data_schema=STEP_RECONFIGURE,
errors=errors,
)

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Any, Literal
from typing import Literal
from hassil.recognize import RecognizeResult
import voluptuous as vol
@@ -21,7 +21,6 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
@@ -53,8 +52,6 @@ from .const import (
DATA_COMPONENT,
DOMAIN,
HOME_ASSISTANT_AGENT,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
SERVICE_PROCESS,
SERVICE_RELOAD,
ConversationEntityFeature,
@@ -269,13 +266,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
manager = get_agent_manager(hass)
hass_config_path = hass.config.path()
config_intents = _get_config_intents(config, hass_config_path)
manager.update_config_intents(config_intents)
await async_setup_default_agent(hass, entity_component)
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config_intents=agent_config.get("intents", {})
)
async def handle_process(service: ServiceCall) -> ServiceResponse:
"""Parse text into commands."""
@@ -300,16 +294,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def handle_reload(service: ServiceCall) -> None:
"""Reload intents."""
language = service.data.get(ATTR_LANGUAGE)
if language is None:
conf = await async_integration_yaml_config(hass, DOMAIN)
if conf is not None:
config_intents = _get_config_intents(conf, hass_config_path)
manager.update_config_intents(config_intents)
agent = manager.default_agent
agent = get_agent_manager(hass).default_agent
if agent is not None:
await agent.async_reload(language=language)
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
hass.services.async_register(
DOMAIN,
@@ -326,27 +313,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
"""Return config intents."""
intents = config.get(DOMAIN, {}).get("intents", {})
return {
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in intents.items()
}
}
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
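The _get_config_intents helper above expands the conversation: intents: mapping from configuration.yaml into the hassil-style structure that the default agent merges into its intent data. A hedged example of the shape of that transformation (the intent name, sentence, and config path are illustrative):

# configuration.yaml (illustrative):
#
#   conversation:
#     intents:
#       TurnOnKitchen:
#         - "make the kitchen bright"

config = {"conversation": {"intents": {"TurnOnKitchen": ["make the kitchen bright"]}}}

# Shape of the structure produced for the agent to merge:
expected = {
    "intents": {
        "TurnOnKitchen": {
            "data": [
                {
                    "sentences": ["make the kitchen bright"],
                    "metadata": {
                        "hass_custom_sentence": True,   # METADATA_CUSTOM_SENTENCE
                        "hass_custom_file": "/config",  # hass.config.path(); path is illustrative
                    },
                }
            ]
        }
    }
}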

View File

@@ -147,7 +147,6 @@ class AgentManager:
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self.default_agent: DefaultAgent | None = None
self.config_intents: dict[str, Any] = {}
self.triggers_details: list[TriggerDetails] = []
@callback
@@ -200,16 +199,9 @@ class AgentManager:
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
"""Set up the default agent."""
agent.update_config_intents(self.config_intents)
agent.update_triggers(self.triggers_details)
self.default_agent = agent
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self.config_intents = intents
if self.default_agent is not None:
self.default_agent.update_config_intents(intents)
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
"""Register a trigger."""
self.triggers_details.append(trigger_details)

View File

@@ -30,7 +30,3 @@ class ConversationEntityFeature(IntFlag):
"""Supported features of the conversation entity."""
CONTROL = 1
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"

View File

@@ -77,12 +77,7 @@ from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import (
DOMAIN,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
ConversationEntityFeature,
)
from .const import DOMAIN, ConversationEntityFeature
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -96,6 +91,8 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -205,9 +202,10 @@ class IntentCache:
async def async_setup_default_agent(
hass: HomeAssistant,
entity_component: EntityComponent[ConversationEntity],
config_intents: dict[str, Any],
) -> None:
"""Set up entity registry listener for the default agent."""
agent = DefaultAgent(hass)
agent = DefaultAgent(hass, config_intents)
await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent)
@@ -232,14 +230,14 @@ class DefaultAgent(ConversationEntity):
_attr_name = "Home Assistant"
_attr_supported_features = ConversationEntityFeature.CONTROL
def __init__(self, hass: HomeAssistant) -> None:
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
self._load_intents_lock = asyncio.Lock()
# Intents from common conversation config
self._config_intents: dict[str, Any] = {}
# intent -> [sentences]
self._config_intents: dict[str, Any] = config_intents
# Sentences that will trigger a callback (skipping intent recognition)
self._triggers_details: list[TriggerDetails] = []
@@ -1037,14 +1035,6 @@ class DefaultAgent(ConversationEntity):
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
@callback
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self._config_intents = intents
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
async def async_prepare(self, language: str | None = None) -> None:
"""Load intents for a language."""
if language is None:
@@ -1169,10 +1159,33 @@ class DefaultAgent(ConversationEntity):
custom_sentences_path,
)
merge_dict(
intents_dict,
self._config_intents,
)
# Load sentences from HA config for default language only
if self._config_intents and (
self.hass.config.language in (language, language_variant)
):
hass_config_path = self.hass.config.path()
merge_dict(
intents_dict,
{
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in self._config_intents.items()
}
},
)
_LOGGER.debug(
"Loaded intents from configuration.yaml",
)
if not intents_dict:
return None

View File

@@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
"iot_class": "local_push",
"loggers": ["async_upnp_client"],
"requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
"requirements": ["async-upnp-client==0.45.0", "getmac==0.9.5"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

View File

@@ -7,7 +7,7 @@
"dependencies": ["ssdp"],
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
"iot_class": "local_polling",
"requirements": ["async-upnp-client==0.46.0"],
"requirements": ["async-upnp-client==0.45.0"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",

View File

@@ -9,7 +9,7 @@
},
"iot_class": "cloud_polling",
"loggers": ["pyecobee"],
"requirements": ["python-ecobee-api==0.3.2"],
"requirements": ["python-ecobee-api==0.2.20"],
"single_config_entry": true,
"zeroconf": [
{

View File

@@ -1,84 +0,0 @@
rules:
# todo : add get_feed_list to the library
# todo : see if we can drop some extra attributes
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
test_reconfigure_api_error should use a mock config entry fixture
test_user_flow_failure should use a mock config entry fixture
move test_user_flow_* to the top of the file
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
No events are explicitly registered by the integration.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: todo
test-coverage:
status: todo
comment: |
test the entry state in test_failure
# Gold
devices: todo
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples:
status: exempt
comment: |
This integration does not provide any automation
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class:
status: todo
comment: change device_class=SensorDeviceClass.SIGNAL_STRENGTH to SOUND_PRESSURE
entity-disabled-by-default: todo
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: done
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import asyncio
from collections import Counter
from collections.abc import Awaitable, Callable
from typing import Literal, NotRequired, TypedDict
from typing import Literal, TypedDict
import voluptuous as vol
@@ -29,7 +29,7 @@ async def async_get_manager(hass: HomeAssistant) -> EnergyManager:
class FlowFromGridSourceType(TypedDict):
"""Dictionary describing the 'from' stat for the grid source."""
# statistic_id of an energy meter (kWh)
# statistic_id of a an energy meter (kWh)
stat_energy_from: str
# statistic_id of costs ($) incurred from the energy meter
@@ -58,14 +58,6 @@ class FlowToGridSourceType(TypedDict):
number_energy_price: float | None # Price for energy ($/kWh)
class GridPowerSourceType(TypedDict):
"""Dictionary holding the source of grid power consumption."""
# statistic_id of a power meter (kW)
# negative values indicate grid return
stat_rate: str
class GridSourceType(TypedDict):
"""Dictionary holding the source of grid energy consumption."""
@@ -73,7 +65,6 @@ class GridSourceType(TypedDict):
flow_from: list[FlowFromGridSourceType]
flow_to: list[FlowToGridSourceType]
power: NotRequired[list[GridPowerSourceType]]
cost_adjustment_day: float
@@ -84,7 +75,6 @@ class SolarSourceType(TypedDict):
type: Literal["solar"]
stat_energy_from: str
stat_rate: NotRequired[str]
config_entry_solar_forecast: list[str] | None
@@ -95,8 +85,6 @@ class BatterySourceType(TypedDict):
stat_energy_from: str
stat_energy_to: str
# positive when discharging, negative when charging
stat_rate: NotRequired[str]
class GasSourceType(TypedDict):
@@ -148,15 +136,12 @@ class DeviceConsumption(TypedDict):
# This is an ever increasing value
stat_consumption: str
# Instantaneous rate of flow: W, L/min or m³/h
stat_rate: NotRequired[str]
# An optional custom name for display in energy graphs
name: str | None
# An optional statistic_id identifying a device
# that includes this device's consumption in its total
included_in_stat: NotRequired[str]
included_in_stat: str | None
class EnergyPreferences(TypedDict):
@@ -209,12 +194,6 @@ FLOW_TO_GRID_SOURCE_SCHEMA = vol.Schema(
}
)
GRID_POWER_SOURCE_SCHEMA = vol.Schema(
{
vol.Required("stat_rate"): str,
}
)
def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[dict]]:
"""Generate a validator that ensures a value is only used once."""
@@ -245,10 +224,6 @@ GRID_SOURCE_SCHEMA = vol.Schema(
[FLOW_TO_GRID_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_energy_to"),
),
vol.Optional("power"): vol.All(
[GRID_POWER_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_rate"),
),
vol.Required("cost_adjustment_day"): vol.Coerce(float),
}
)
@@ -256,7 +231,6 @@ SOLAR_SOURCE_SCHEMA = vol.Schema(
{
vol.Required("type"): "solar",
vol.Required("stat_energy_from"): str,
vol.Optional("stat_rate"): str,
vol.Optional("config_entry_solar_forecast"): vol.Any([str], None),
}
)
@@ -265,7 +239,6 @@ BATTERY_SOURCE_SCHEMA = vol.Schema(
vol.Required("type"): "battery",
vol.Required("stat_energy_from"): str,
vol.Required("stat_energy_to"): str,
vol.Optional("stat_rate"): str,
}
)
GAS_SOURCE_SCHEMA = vol.Schema(
@@ -321,7 +294,6 @@ ENERGY_SOURCE_SCHEMA = vol.All(
DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
{
vol.Required("stat_consumption"): str,
vol.Optional("stat_rate"): str,
vol.Optional("name"): str,
vol.Optional("included_in_stat"): str,
}
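The TypedDict changes in this file swap between NotRequired keys and required-but-nullable keys. A short, hedged sketch of the difference, using an illustrative device entry (the entity IDs below are made up):

from typing import NotRequired, TypedDict

class DeviceConsumption(TypedDict):
    stat_consumption: str
    stat_rate: NotRequired[str]         # optional: key may be absent entirely
    name: str | None                    # required: key must exist, value may be None
    included_in_stat: NotRequired[str]

with_rate: DeviceConsumption = {
    "stat_consumption": "sensor.dishwasher_energy",
    "stat_rate": "sensor.dishwasher_power",
    "name": "Dishwasher",
}
without_rate: DeviceConsumption = {
    "stat_consumption": "sensor.fridge_energy",
    "name": None,
}

print(with_rate.get("stat_rate"))     # sensor.dishwasher_power
print(without_rate.get("stat_rate"))  # None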

View File

@@ -12,7 +12,6 @@ from homeassistant.const import (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
UnitOfEnergy,
UnitOfPower,
UnitOfVolume,
)
from homeassistant.core import HomeAssistant, callback, valid_entity_id
@@ -24,17 +23,12 @@ ENERGY_USAGE_DEVICE_CLASSES = (sensor.SensorDeviceClass.ENERGY,)
ENERGY_USAGE_UNITS: dict[str, tuple[UnitOfEnergy, ...]] = {
sensor.SensorDeviceClass.ENERGY: tuple(UnitOfEnergy)
}
POWER_USAGE_DEVICE_CLASSES = (sensor.SensorDeviceClass.POWER,)
POWER_USAGE_UNITS: dict[str, tuple[UnitOfPower, ...]] = {
sensor.SensorDeviceClass.POWER: tuple(UnitOfPower)
}
ENERGY_PRICE_UNITS = tuple(
f"/{unit}" for units in ENERGY_USAGE_UNITS.values() for unit in units
)
ENERGY_UNIT_ERROR = "entity_unexpected_unit_energy"
ENERGY_PRICE_UNIT_ERROR = "entity_unexpected_unit_energy_price"
POWER_UNIT_ERROR = "entity_unexpected_unit_power"
GAS_USAGE_DEVICE_CLASSES = (
sensor.SensorDeviceClass.ENERGY,
sensor.SensorDeviceClass.GAS,
@@ -88,10 +82,6 @@ def _get_placeholders(hass: HomeAssistant, issue_type: str) -> dict[str, str] |
f"{currency}{unit}" for unit in ENERGY_PRICE_UNITS
),
}
if issue_type == POWER_UNIT_ERROR:
return {
"power_units": ", ".join(POWER_USAGE_UNITS[sensor.SensorDeviceClass.POWER]),
}
if issue_type == GAS_UNIT_ERROR:
return {
"energy_units": ", ".join(GAS_USAGE_UNITS[sensor.SensorDeviceClass.ENERGY]),
@@ -169,7 +159,7 @@ class EnergyPreferencesValidation:
@callback
def _async_validate_stat_common(
def _async_validate_usage_stat(
hass: HomeAssistant,
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
stat_id: str,
@@ -177,41 +167,37 @@ def _async_validate_stat_common(
allowed_units: Mapping[str, Sequence[str]],
unit_error: str,
issues: ValidationIssues,
check_negative: bool = False,
) -> str | None:
"""Validate common aspects of a statistic.
Returns the entity_id if validation succeeds, None otherwise.
"""
) -> None:
"""Validate a statistic."""
if stat_id not in metadata:
issues.add_issue(hass, "statistics_not_defined", stat_id)
has_entity_source = valid_entity_id(stat_id)
if not has_entity_source:
return None
return
entity_id = stat_id
if not recorder.is_entity_recorded(hass, entity_id):
issues.add_issue(hass, "recorder_untracked", entity_id)
return None
return
if (state := hass.states.get(entity_id)) is None:
issues.add_issue(hass, "entity_not_defined", entity_id)
return None
return
if state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
issues.add_issue(hass, "entity_unavailable", entity_id, state.state)
return None
return
try:
current_value: float | None = float(state.state)
except ValueError:
issues.add_issue(hass, "entity_state_non_numeric", entity_id, state.state)
return None
return
if check_negative and current_value is not None and current_value < 0:
if current_value is not None and current_value < 0:
issues.add_issue(hass, "entity_negative_state", entity_id, current_value)
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
@@ -225,36 +211,6 @@ def _async_validate_stat_common(
if device_class and unit not in allowed_units.get(device_class, []):
issues.add_issue(hass, unit_error, entity_id, unit)
return entity_id
@callback
def _async_validate_usage_stat(
hass: HomeAssistant,
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
stat_id: str,
allowed_device_classes: Sequence[str],
allowed_units: Mapping[str, Sequence[str]],
unit_error: str,
issues: ValidationIssues,
) -> None:
"""Validate a statistic."""
entity_id = _async_validate_stat_common(
hass,
metadata,
stat_id,
allowed_device_classes,
allowed_units,
unit_error,
issues,
check_negative=True,
)
if entity_id is None:
return
state = hass.states.get(entity_id)
assert state is not None
state_class = state.attributes.get(sensor.ATTR_STATE_CLASS)
allowed_state_classes = [
@@ -299,39 +255,6 @@ def _async_validate_price_entity(
issues.add_issue(hass, unit_error, entity_id, unit)
@callback
def _async_validate_power_stat(
hass: HomeAssistant,
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
stat_id: str,
allowed_device_classes: Sequence[str],
allowed_units: Mapping[str, Sequence[str]],
unit_error: str,
issues: ValidationIssues,
) -> None:
"""Validate a power statistic."""
entity_id = _async_validate_stat_common(
hass,
metadata,
stat_id,
allowed_device_classes,
allowed_units,
unit_error,
issues,
check_negative=False,
)
if entity_id is None:
return
state = hass.states.get(entity_id)
assert state is not None
state_class = state.attributes.get(sensor.ATTR_STATE_CLASS)
if state_class != sensor.SensorStateClass.MEASUREMENT:
issues.add_issue(hass, "entity_unexpected_state_class", entity_id, state_class)
@callback
def _async_validate_cost_stat(
hass: HomeAssistant,
@@ -386,260 +309,11 @@ def _async_validate_auto_generated_cost_entity(
issues.add_issue(hass, "recorder_untracked", cost_entity_id)
def _validate_grid_source(
hass: HomeAssistant,
source: data.GridSourceType,
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
wanted_statistics_metadata: set[str],
source_result: ValidationIssues,
validate_calls: list[functools.partial[None]],
) -> None:
"""Validate grid energy source."""
flow_from: data.FlowFromGridSourceType
for flow_from in source["flow_from"]:
wanted_statistics_metadata.add(flow_from["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow_from["stat_energy_from"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
source_result,
)
)
if (stat_cost := flow_from.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (entity_energy_price := flow_from.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
)
)
if (
flow_from.get("entity_energy_price") is not None
or flow_from.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow_from["stat_energy_from"],
source_result,
)
)
flow_to: data.FlowToGridSourceType
for flow_to in source["flow_to"]:
wanted_statistics_metadata.add(flow_to["stat_energy_to"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow_to["stat_energy_to"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
source_result,
)
)
if (stat_compensation := flow_to.get("stat_compensation")) is not None:
wanted_statistics_metadata.add(stat_compensation)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_compensation,
source_result,
)
)
elif (entity_energy_price := flow_to.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
)
)
if (
flow_to.get("entity_energy_price") is not None
or flow_to.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow_to["stat_energy_to"],
source_result,
)
)
for power_stat in source.get("power", []):
wanted_statistics_metadata.add(power_stat["stat_rate"])
validate_calls.append(
functools.partial(
_async_validate_power_stat,
hass,
statistics_metadata,
power_stat["stat_rate"],
POWER_USAGE_DEVICE_CLASSES,
POWER_USAGE_UNITS,
POWER_UNIT_ERROR,
source_result,
)
)
def _validate_gas_source(
hass: HomeAssistant,
source: data.GasSourceType,
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
wanted_statistics_metadata: set[str],
source_result: ValidationIssues,
validate_calls: list[functools.partial[None]],
) -> None:
"""Validate gas energy source."""
wanted_statistics_metadata.add(source["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
source["stat_energy_from"],
GAS_USAGE_DEVICE_CLASSES,
GAS_USAGE_UNITS,
GAS_UNIT_ERROR,
source_result,
)
)
if (stat_cost := source.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
GAS_PRICE_UNITS,
GAS_PRICE_UNIT_ERROR,
)
)
if (
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
source["stat_energy_from"],
source_result,
)
)
def _validate_water_source(
hass: HomeAssistant,
source: data.WaterSourceType,
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
wanted_statistics_metadata: set[str],
source_result: ValidationIssues,
validate_calls: list[functools.partial[None]],
) -> None:
"""Validate water energy source."""
wanted_statistics_metadata.add(source["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
source["stat_energy_from"],
WATER_USAGE_DEVICE_CLASSES,
WATER_USAGE_UNITS,
WATER_UNIT_ERROR,
source_result,
)
)
if (stat_cost := source.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
WATER_PRICE_UNITS,
WATER_PRICE_UNIT_ERROR,
)
)
if (
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
source["stat_energy_from"],
source_result,
)
)
async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
"""Validate the energy configuration."""
manager: data.EnergyManager = await data.async_get_manager(hass)
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]] = {}
validate_calls: list[functools.partial[None]] = []
validate_calls = []
wanted_statistics_metadata: set[str] = set()
result = EnergyPreferencesValidation()
@@ -653,35 +327,215 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
result.energy_sources.append(source_result)
if source["type"] == "grid":
_validate_grid_source(
hass,
source,
statistics_metadata,
wanted_statistics_metadata,
source_result,
validate_calls,
)
flow: data.FlowFromGridSourceType | data.FlowToGridSourceType
for flow in source["flow_from"]:
wanted_statistics_metadata.add(flow["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow["stat_energy_from"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
source_result,
)
)
if (stat_cost := flow.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (
entity_energy_price := flow.get("entity_energy_price")
) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
)
)
if (
flow.get("entity_energy_price") is not None
or flow.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow["stat_energy_from"],
source_result,
)
)
for flow in source["flow_to"]:
wanted_statistics_metadata.add(flow["stat_energy_to"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow["stat_energy_to"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
source_result,
)
)
if (stat_compensation := flow.get("stat_compensation")) is not None:
wanted_statistics_metadata.add(stat_compensation)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_compensation,
source_result,
)
)
elif (
entity_energy_price := flow.get("entity_energy_price")
) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
)
)
if (
flow.get("entity_energy_price") is not None
or flow.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow["stat_energy_to"],
source_result,
)
)
elif source["type"] == "gas":
_validate_gas_source(
hass,
source,
statistics_metadata,
wanted_statistics_metadata,
source_result,
validate_calls,
wanted_statistics_metadata.add(source["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
source["stat_energy_from"],
GAS_USAGE_DEVICE_CLASSES,
GAS_USAGE_UNITS,
GAS_UNIT_ERROR,
source_result,
)
)
if (stat_cost := source.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
GAS_PRICE_UNITS,
GAS_PRICE_UNIT_ERROR,
)
)
if (
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
source["stat_energy_from"],
source_result,
)
)
elif source["type"] == "water":
_validate_water_source(
hass,
source,
statistics_metadata,
wanted_statistics_metadata,
source_result,
validate_calls,
wanted_statistics_metadata.add(source["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
source["stat_energy_from"],
WATER_USAGE_DEVICE_CLASSES,
WATER_USAGE_UNITS,
WATER_UNIT_ERROR,
source_result,
)
)
if (stat_cost := source.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
_async_validate_cost_stat,
hass,
statistics_metadata,
stat_cost,
source_result,
)
)
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
source_result,
WATER_PRICE_UNITS,
WATER_PRICE_UNIT_ERROR,
)
)
if (
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
source["stat_energy_from"],
source_result,
)
)
elif source["type"] == "solar":
wanted_statistics_metadata.add(source["stat_energy_from"])
validate_calls.append(

View File

@@ -147,8 +147,6 @@ async def async_get_config_entry_diagnostics(
"ctmeter_production_phases": envoy_data.ctmeter_production_phases,
"ctmeter_consumption_phases": envoy_data.ctmeter_consumption_phases,
"ctmeter_storage_phases": envoy_data.ctmeter_storage_phases,
"ctmeters": envoy_data.ctmeters,
"ctmeters_phases": envoy_data.ctmeters_phases,
"dry_contact_status": envoy_data.dry_contact_status,
"dry_contact_settings": envoy_data.dry_contact_settings,
"inverters": envoy_data.inverters,
@@ -181,7 +179,6 @@ async def async_get_config_entry_diagnostics(
"ct_consumption_meter": envoy.consumption_meter_type,
"ct_production_meter": envoy.production_meter_type,
"ct_storage_meter": envoy.storage_meter_type,
"ct_meters": list(envoy_data.ctmeters.keys()),
}
fixture_data: dict[str, Any] = {}

View File

@@ -399,189 +399,117 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription):
cttype: str | None = None
# All CT types unified in a common setup
CT_SENSORS = (
[
EnvoyCTSensorEntityDescription(
key=key,
translation_key=key,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_delivered"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
(CtType.NET_CONSUMPTION, "lifetime_net_consumption"),
# Production CT energy_delivered is not used
(CtType.STORAGE, "lifetime_battery_discharged"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=key,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_received"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
(CtType.NET_CONSUMPTION, "lifetime_net_production"),
# Production CT energy_received is not used
(CtType.STORAGE, "lifetime_battery_charged"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=key,
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("active_power"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
(CtType.NET_CONSUMPTION, "net_consumption"),
# Production CT active_power is not used
(CtType.STORAGE, "battery_discharge"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=(translation_key if translation_key != "" else key),
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.FREQUENCY,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("frequency"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
(CtType.NET_CONSUMPTION, "frequency", "net_ct_frequency"),
(CtType.PRODUCTION, "production_ct_frequency", ""),
(CtType.STORAGE, "storage_ct_frequency", ""),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=(translation_key if translation_key != "" else key),
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.VOLTAGE,
suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("voltage"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
(CtType.NET_CONSUMPTION, "voltage", "net_ct_voltage"),
(CtType.PRODUCTION, "production_ct_voltage", ""),
(CtType.STORAGE, "storage_voltage", "storage_ct_voltage"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=key,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.CURRENT,
suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
suggested_display_precision=3,
entity_registry_enabled_default=False,
value_fn=attrgetter("current"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
(CtType.NET_CONSUMPTION, "net_ct_current"),
(CtType.PRODUCTION, "production_ct_current"),
(CtType.STORAGE, "storage_ct_current"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=key,
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=2,
entity_registry_enabled_default=False,
value_fn=attrgetter("power_factor"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
(CtType.NET_CONSUMPTION, "net_ct_powerfactor"),
(CtType.PRODUCTION, "production_ct_powerfactor"),
(CtType.STORAGE, "storage_ct_powerfactor"),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=(translation_key if translation_key != "" else key),
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=list(CtMeterStatus),
entity_registry_enabled_default=False,
value_fn=attrgetter("metering_status"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
(
CtType.NET_CONSUMPTION,
"net_consumption_ct_metering_status",
"net_ct_metering_status",
),
(CtType.PRODUCTION, "production_ct_metering_status", ""),
(CtType.STORAGE, "storage_ct_metering_status", ""),
)
]
+ [
EnvoyCTSensorEntityDescription(
key=key,
translation_key=(translation_key if translation_key != "" else key),
state_class=None,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
(
CtType.NET_CONSUMPTION,
"net_consumption_ct_status_flags",
"net_ct_status_flags",
),
(CtType.PRODUCTION, "production_ct_status_flags", ""),
(CtType.STORAGE, "storage_ct_status_flags", ""),
)
]
CT_NET_CONSUMPTION_SENSORS = (
EnvoyCTSensorEntityDescription(
key="lifetime_net_consumption",
translation_key="lifetime_net_consumption",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_delivered"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="lifetime_net_production",
translation_key="lifetime_net_production",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_received"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="net_consumption",
translation_key="net_consumption",
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("active_power"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="frequency",
translation_key="net_ct_frequency",
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.FREQUENCY,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("frequency"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="voltage",
translation_key="net_ct_voltage",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.VOLTAGE,
suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("voltage"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="net_ct_current",
translation_key="net_ct_current",
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.CURRENT,
suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
suggested_display_precision=3,
entity_registry_enabled_default=False,
value_fn=attrgetter("current"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="net_ct_powerfactor",
translation_key="net_ct_powerfactor",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=2,
entity_registry_enabled_default=False,
value_fn=attrgetter("power_factor"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="net_consumption_ct_metering_status",
translation_key="net_ct_metering_status",
device_class=SensorDeviceClass.ENUM,
entity_category=EntityCategory.DIAGNOSTIC,
options=list(CtMeterStatus),
entity_registry_enabled_default=False,
value_fn=attrgetter("metering_status"),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
EnvoyCTSensorEntityDescription(
key="net_consumption_ct_status_flags",
translation_key="net_ct_status_flags",
state_class=None,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
on_phase=None,
cttype=CtType.NET_CONSUMPTION,
),
)
CT_PHASE_SENSORS = {
CT_NET_CONSUMPTION_PHASE_SENSORS = {
(on_phase := PHASENAMES[phase]): [
replace(
sensor,
@@ -591,7 +519,220 @@ CT_PHASE_SENSORS = {
on_phase=on_phase,
translation_placeholders={"phase_name": f"l{phase + 1}"},
)
for sensor in list(CT_SENSORS)
for sensor in list(CT_NET_CONSUMPTION_SENSORS)
]
for phase in range(3)
}
CT_PRODUCTION_SENSORS = (
EnvoyCTSensorEntityDescription(
key="production_ct_frequency",
translation_key="production_ct_frequency",
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.FREQUENCY,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("frequency"),
on_phase=None,
cttype=CtType.PRODUCTION,
),
EnvoyCTSensorEntityDescription(
key="production_ct_voltage",
translation_key="production_ct_voltage",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.VOLTAGE,
suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("voltage"),
on_phase=None,
cttype=CtType.PRODUCTION,
),
EnvoyCTSensorEntityDescription(
key="production_ct_current",
translation_key="production_ct_current",
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.CURRENT,
suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
suggested_display_precision=3,
entity_registry_enabled_default=False,
value_fn=attrgetter("current"),
on_phase=None,
cttype=CtType.PRODUCTION,
),
EnvoyCTSensorEntityDescription(
key="production_ct_powerfactor",
translation_key="production_ct_powerfactor",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=2,
entity_registry_enabled_default=False,
value_fn=attrgetter("power_factor"),
on_phase=None,
cttype=CtType.PRODUCTION,
),
EnvoyCTSensorEntityDescription(
key="production_ct_metering_status",
translation_key="production_ct_metering_status",
device_class=SensorDeviceClass.ENUM,
options=list(CtMeterStatus),
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=attrgetter("metering_status"),
on_phase=None,
cttype=CtType.PRODUCTION,
),
EnvoyCTSensorEntityDescription(
key="production_ct_status_flags",
translation_key="production_ct_status_flags",
state_class=None,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
on_phase=None,
cttype=CtType.PRODUCTION,
),
)
CT_PRODUCTION_PHASE_SENSORS = {
(on_phase := PHASENAMES[phase]): [
replace(
sensor,
key=f"{sensor.key}_l{phase + 1}",
translation_key=f"{sensor.translation_key}_phase",
entity_registry_enabled_default=False,
on_phase=on_phase,
translation_placeholders={"phase_name": f"l{phase + 1}"},
)
for sensor in list(CT_PRODUCTION_SENSORS)
]
for phase in range(3)
}
CT_STORAGE_SENSORS = (
EnvoyCTSensorEntityDescription(
key="lifetime_battery_discharged",
translation_key="lifetime_battery_discharged",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_delivered"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="lifetime_battery_charged",
translation_key="lifetime_battery_charged",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.ENERGY,
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_received"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="battery_discharge",
translation_key="battery_discharge",
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("active_power"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_ct_frequency",
translation_key="storage_ct_frequency",
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.FREQUENCY,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("frequency"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_voltage",
translation_key="storage_ct_voltage",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.VOLTAGE,
suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("voltage"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_ct_current",
translation_key="storage_ct_current",
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.CURRENT,
suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
suggested_display_precision=3,
entity_registry_enabled_default=False,
value_fn=attrgetter("current"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_ct_powerfactor",
translation_key="storage_ct_powerfactor",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=2,
entity_registry_enabled_default=False,
value_fn=attrgetter("power_factor"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_ct_metering_status",
translation_key="storage_ct_metering_status",
device_class=SensorDeviceClass.ENUM,
options=list(CtMeterStatus),
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=attrgetter("metering_status"),
on_phase=None,
cttype=CtType.STORAGE,
),
EnvoyCTSensorEntityDescription(
key="storage_ct_status_flags",
translation_key="storage_ct_status_flags",
state_class=None,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
on_phase=None,
cttype=CtType.STORAGE,
),
)
CT_STORAGE_PHASE_SENSORS = {
(on_phase := PHASENAMES[phase]): [
replace(
sensor,
key=f"{sensor.key}_l{phase + 1}",
translation_key=f"{sensor.translation_key}_phase",
entity_registry_enabled_default=False,
on_phase=on_phase,
translation_placeholders={"phase_name": f"l{phase + 1}"},
)
for sensor in list(CT_STORAGE_SENSORS)
]
for phase in range(3)
}
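The CT_*_PHASE_SENSORS dicts above derive per-phase entity descriptions from the base tuples with dataclasses.replace, suffixing the key and translation key for each phase. A self-contained sketch of the same derivation using a trimmed stand-in for EnvoyCTSensorEntityDescription:

from dataclasses import dataclass, replace

PHASENAMES = ("L1", "L2", "L3")

@dataclass(frozen=True)
class Description:
    """Trimmed stand-in for EnvoyCTSensorEntityDescription."""
    key: str
    translation_key: str
    on_phase: str | None = None

BASE_SENSORS = (Description(key="net_ct_current", translation_key="net_ct_current"),)

PHASE_SENSORS = {
    (on_phase := PHASENAMES[phase]): [
        replace(
            sensor,
            key=f"{sensor.key}_l{phase + 1}",
            translation_key=f"{sensor.translation_key}_phase",
            on_phase=on_phase,
        )
        for sensor in BASE_SENSORS
    ]
    for phase in range(3)
}

# Description(key='net_ct_current_l2', translation_key='net_ct_current_phase', on_phase='L2')
print(PHASE_SENSORS["L2"][0])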
@@ -919,14 +1060,24 @@ async def async_setup_entry(
if envoy_data.ctmeters:
entities.extend(
EnvoyCTEntity(coordinator, description)
for description in CT_SENSORS
for sensors in (
CT_NET_CONSUMPTION_SENSORS,
CT_PRODUCTION_SENSORS,
CT_STORAGE_SENSORS,
)
for description in sensors
if description.cttype in envoy_data.ctmeters
)
# Add Current Transformer phase entities
if ctmeters_phases := envoy_data.ctmeters_phases:
entities.extend(
EnvoyCTPhaseEntity(coordinator, description)
for phase, descriptions in CT_PHASE_SENSORS.items()
for sensors in (
CT_NET_CONSUMPTION_PHASE_SENSORS,
CT_PRODUCTION_PHASE_SENSORS,
CT_STORAGE_PHASE_SENSORS,
)
for phase, descriptions in sensors.items()
for description in descriptions
if (cttype := description.cttype) in ctmeters_phases
and phase in ctmeters_phases[cttype]
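async_setup_entry above builds entities with a nested generator that keeps only descriptions whose CT type (and phase) the Envoy actually reported. A plain-Python sketch of that filter shape, with simplified stand-ins for the descriptions and coordinator data:

from dataclasses import dataclass

@dataclass(frozen=True)
class Description:
    key: str
    cttype: str

SENSOR_GROUPS = (
    (Description("net_consumption", "net-consumption"),),
    (Description("storage_ct_current", "storage"),),
)
available_ctmeters = {"net-consumption"}  # e.g. the CT types the device reported

entities = [
    description
    for sensors in SENSOR_GROUPS
    for description in sensors
    if description.cttype in available_ctmeters
]
print([d.key for d in entities])  # ['net_consumption']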

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import asyncio
from urllib.parse import quote
import voluptuous as vol
@@ -153,9 +152,7 @@ class HassFoscamCamera(FoscamEntity, Camera):
async def stream_source(self) -> str | None:
"""Return the stream source."""
if self._rtsp_port:
_username = quote(self._username)
_password = quote(self._password)
return f"rtsp://{_username}:{_password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
return None
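The foscam hunk above contrasts an RTSP URL built from raw credentials with one built from urllib.parse.quote-escaped credentials; escaping matters when the username or password contains reserved characters such as '@' or ':'. A standalone sketch with made-up credentials:

from urllib.parse import quote

# Illustrative credentials only; '@' and ':' would otherwise break URL parsing.
username = "cam@home"
password = "p@ss:word"
host, rtsp_port, stream = "192.0.2.10", 554, 1

url = f"rtsp://{quote(username)}:{quote(password)}@{host}:{rtsp_port}/video{stream}"
print(url)  # rtsp://cam%40home:p%40ss%3Aword@192.0.2.10:554/video1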

View File

@@ -116,28 +116,20 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
is_open_wdr = None
is_open_hdr = None
reserve3 = product_info.get("reserve4")
model = product_info.get("model")
model_int = int(model) if model is not None else 7002
if model_int > 7001:
reserve3_int = int(reserve3) if reserve3 is not None else 0
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
if supports_wdr_adjustment_val:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = (
is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
)
is_open_wdr = bool(int(mode))
elif supports_hdr_adjustment_val:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = (
is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
)
is_open_hdr = bool(int(mode))
else:
supports_wdr_adjustment_val = False
supports_hdr_adjustment_val = False
reserve3_int = int(reserve3) if reserve3 is not None else 0
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
if supports_wdr_adjustment_val:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
is_open_wdr = bool(int(mode))
elif supports_hdr_adjustment_val:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
is_open_hdr = bool(int(mode))
ret_sw, software_capabilities = self.session.getSWCapabilities()
supports_speak_volume_adjustment_val = (
bool(int(software_capabilities.get("swCapabilities1")) & 32)
if ret_sw == 0

View File

@@ -481,13 +481,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
sidebar_title="climate",
sidebar_default_visible=False,
)
async_register_built_in_panel(
hass,
"home",
sidebar_icon="mdi:home",
sidebar_title="home",
sidebar_default_visible=False,
)
async_register_built_in_panel(hass, "profile")
@@ -777,9 +770,7 @@ class ManifestJSONView(HomeAssistantView):
@websocket_api.websocket_command(
{
"type": "frontend/get_icons",
vol.Required("category"): vol.In(
{"entity", "entity_component", "services", "triggers", "conditions"}
),
vol.Required("category"): vol.In({"entity", "entity_component", "services"}),
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
}
)

View File

@@ -1,14 +1,13 @@
"""The Goodwe inverter component."""
from goodwe import Inverter, InverterError, connect
from goodwe.const import GOODWE_UDP_PORT
from goodwe import InverterError, connect
from goodwe.const import GOODWE_TCP_PORT, GOODWE_UDP_PORT
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import DeviceInfo
from .config_flow import GoodweFlowHandler
from .const import CONF_MODEL_FAMILY, DOMAIN, PLATFORMS
from .coordinator import GoodweConfigEntry, GoodweRuntimeData, GoodweUpdateCoordinator
@@ -16,22 +15,28 @@ from .coordinator import GoodweConfigEntry, GoodweRuntimeData, GoodweUpdateCoord
async def async_setup_entry(hass: HomeAssistant, entry: GoodweConfigEntry) -> bool:
"""Set up the Goodwe components from a config entry."""
host = entry.data[CONF_HOST]
port = entry.data.get(CONF_PORT, GOODWE_UDP_PORT)
model_family = entry.data[CONF_MODEL_FAMILY]
# Connect to Goodwe inverter
try:
inverter = await connect(
host=host,
port=port,
port=GOODWE_UDP_PORT,
family=model_family,
retries=10,
)
except InverterError as err:
except InverterError as err_udp:
# First try with UDP failed, trying with the TCP port
try:
inverter = await async_check_port(hass, entry, host)
inverter = await connect(
host=host,
port=GOODWE_TCP_PORT,
family=model_family,
retries=10,
)
except InverterError:
raise ConfigEntryNotReady from err
# Both ports are unavailable
raise ConfigEntryNotReady from err_udp
device_info = DeviceInfo(
configuration_url="https://www.semsportal.com",
@@ -61,23 +66,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoodweConfigEntry) -> bo
return True
async def async_check_port(
hass: HomeAssistant, entry: GoodweConfigEntry, host: str
) -> Inverter:
"""Check the communication port of the inverter, it may have changed after a firmware update."""
inverter, port = await GoodweFlowHandler.async_detect_inverter_port(host=host)
family = type(inverter).__name__
hass.config_entries.async_update_entry(
entry,
data={
CONF_HOST: host,
CONF_PORT: port,
CONF_MODEL_FAMILY: family,
},
)
return inverter
async def async_unload_entry(
hass: HomeAssistant, config_entry: GoodweConfigEntry
) -> bool:
@@ -88,31 +76,3 @@ async def async_unload_entry(
async def update_listener(hass: HomeAssistant, config_entry: GoodweConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
async def async_migrate_entry(
hass: HomeAssistant, config_entry: GoodweConfigEntry
) -> bool:
"""Migrate old config entries."""
if config_entry.version > 2:
# This means the user has downgraded from a future version
return False
if config_entry.version == 1:
# Update from version 1 to version 2 adding the PROTOCOL to the config entry
host = config_entry.data[CONF_HOST]
try:
inverter, port = await GoodweFlowHandler.async_detect_inverter_port(
host=host
)
except InverterError as err:
raise ConfigEntryNotReady from err
new_data = {
CONF_HOST: host,
CONF_PORT: port,
CONF_MODEL_FAMILY: type(inverter).__name__,
}
hass.config_entries.async_update_entry(config_entry, data=new_data, version=2)
return True

View File

@@ -5,12 +5,12 @@ from __future__ import annotations
import logging
from typing import Any
from goodwe import Inverter, InverterError, connect
from goodwe import InverterError, connect
from goodwe.const import GOODWE_TCP_PORT, GOODWE_UDP_PORT
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.const import CONF_HOST
from .const import CONF_MODEL_FAMILY, DEFAULT_NAME, DOMAIN
@@ -26,15 +26,9 @@ _LOGGER = logging.getLogger(__name__)
class GoodweFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a Goodwe config flow."""
MINOR_VERSION = 2
VERSION = 1
async def async_handle_successful_connection(
self,
inverter: Inverter,
host: str,
port: int,
) -> ConfigFlowResult:
"""Handle a successful connection storing it's values on the entry data."""
async def _handle_successful_connection(self, inverter, host):
await self.async_set_unique_id(inverter.serial_number)
self._abort_if_unique_id_configured()
@@ -42,7 +36,6 @@ class GoodweFlowHandler(ConfigFlow, domain=DOMAIN):
title=DEFAULT_NAME,
data={
CONF_HOST: host,
CONF_PORT: port,
CONF_MODEL_FAMILY: type(inverter).__name__,
},
)
@@ -55,26 +48,19 @@ class GoodweFlowHandler(ConfigFlow, domain=DOMAIN):
if user_input is not None:
host = user_input[CONF_HOST]
try:
inverter, port = await self.async_detect_inverter_port(host=host)
inverter = await connect(host=host, port=GOODWE_UDP_PORT, retries=10)
except InverterError:
errors[CONF_HOST] = "connection_error"
try:
inverter = await connect(
host=host, port=GOODWE_TCP_PORT, retries=10
)
except InverterError:
errors[CONF_HOST] = "connection_error"
else:
return await self._handle_successful_connection(inverter, host)
else:
return await self.async_handle_successful_connection(
inverter, host, port
)
return await self._handle_successful_connection(inverter, host)
return self.async_show_form(
step_id="user", data_schema=CONFIG_SCHEMA, errors=errors
)
@staticmethod
async def async_detect_inverter_port(
host: str,
) -> tuple[Inverter, int]:
"""Detects the port of the Inverter."""
port = GOODWE_UDP_PORT
try:
inverter = await connect(host=host, port=port, retries=10)
except InverterError:
port = GOODWE_TCP_PORT
inverter = await connect(host=host, port=port, retries=10)
return inverter, port
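Both Goodwe hunks above probe the inverter on the UDP port first and fall back to the TCP port when that fails. A hedged sketch of the same try-then-fallback shape, with a stand-in probe() instead of goodwe.connect and illustrative port numbers:

import asyncio

UDP_PORT, TCP_PORT = 8899, 502  # illustrative defaults

class ProbeError(Exception):
    """Raised when a probe on one port fails."""

async def probe(host: str, port: int) -> str:
    """Stand-in for goodwe.connect(); pretend only the TCP port answers."""
    if port != TCP_PORT:
        raise ProbeError(f"no answer on port {port}")
    return f"inverter@{host}:{port}"

async def detect_port(host: str) -> tuple[str, int]:
    """Try UDP first, then fall back to TCP, returning the working port."""
    port = UDP_PORT
    try:
        inverter = await probe(host, port)
    except ProbeError:
        port = TCP_PORT
        inverter = await probe(host, port)
    return inverter, port

print(asyncio.run(detect_port("192.0.2.20")))  # ('inverter@192.0.2.20:502', 502)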

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/google",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.1.0"]
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.0.0"]
}

View File

@@ -53,9 +53,6 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem:
# Due dates are always returned in UTC, so we only need to
# parse the date portion, which will be interpreted as a local date.
due = datetime.fromisoformat(due_str).date()
completed: datetime | None = None
if (completed_str := item.get("completed")) is not None:
completed = datetime.fromisoformat(completed_str)
return TodoItem(
summary=item["title"],
uid=item["id"],
@@ -64,7 +61,6 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem:
TodoItemStatus.NEEDS_ACTION,
),
due=due,
completed=completed,
description=item.get("notes"),
)
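One side of the hunk above parses the optional "completed" timestamp of a Tasks API item with datetime.fromisoformat, alongside the date-only due field. A minimal sketch over a trimmed example payload:

from datetime import datetime

item = {
    "title": "Water the plants",
    "due": "2024-05-01T00:00:00.000Z",
    "completed": "2024-05-01T17:23:05.000Z",
}

# Only the date portion of the due timestamp is relevant.
due = datetime.fromisoformat(item["due"]).date()

completed: datetime | None = None
if (completed_str := item.get("completed")) is not None:
    completed = datetime.fromisoformat(completed_str)

print(due, completed)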

View File

@@ -1,84 +0,0 @@
"""The Google Weather integration."""
from __future__ import annotations
import asyncio
from google_weather_api import GoogleWeatherApi
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_REFERRER
from .coordinator import (
GoogleWeatherConfigEntry,
GoogleWeatherCurrentConditionsCoordinator,
GoogleWeatherDailyForecastCoordinator,
GoogleWeatherHourlyForecastCoordinator,
GoogleWeatherRuntimeData,
GoogleWeatherSubEntryRuntimeData,
)
_PLATFORMS: list[Platform] = [Platform.WEATHER]
async def async_setup_entry(
hass: HomeAssistant, entry: GoogleWeatherConfigEntry
) -> bool:
"""Set up Google Weather from a config entry."""
api = GoogleWeatherApi(
session=async_get_clientsession(hass),
api_key=entry.data[CONF_API_KEY],
referrer=entry.data.get(CONF_REFERRER),
language_code=hass.config.language,
)
subentries_runtime_data: dict[str, GoogleWeatherSubEntryRuntimeData] = {}
for subentry in entry.subentries.values():
subentry_runtime_data = GoogleWeatherSubEntryRuntimeData(
coordinator_observation=GoogleWeatherCurrentConditionsCoordinator(
hass, entry, subentry, api
),
coordinator_daily_forecast=GoogleWeatherDailyForecastCoordinator(
hass, entry, subentry, api
),
coordinator_hourly_forecast=GoogleWeatherHourlyForecastCoordinator(
hass, entry, subentry, api
),
)
subentries_runtime_data[subentry.subentry_id] = subentry_runtime_data
tasks = [
coro
for subentry_runtime_data in subentries_runtime_data.values()
for coro in (
subentry_runtime_data.coordinator_observation.async_config_entry_first_refresh(),
subentry_runtime_data.coordinator_daily_forecast.async_config_entry_first_refresh(),
subentry_runtime_data.coordinator_hourly_forecast.async_config_entry_first_refresh(),
)
]
await asyncio.gather(*tasks)
entry.runtime_data = GoogleWeatherRuntimeData(
api=api,
subentries_runtime_data=subentries_runtime_data,
)
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
entry.async_on_unload(entry.add_update_listener(async_update_options))
return True
async def async_unload_entry(
hass: HomeAssistant, entry: GoogleWeatherConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
async def async_update_options(
hass: HomeAssistant, entry: GoogleWeatherConfigEntry
) -> None:
"""Update options."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -1,198 +0,0 @@
"""Config flow for the Google Weather integration."""
from __future__ import annotations
import logging
from typing import Any
from google_weather_api import GoogleWeatherApi, GoogleWeatherApiError
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigEntryState,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryFlow,
SubentryFlowResult,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LOCATION,
CONF_LONGITUDE,
CONF_NAME,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import LocationSelector, LocationSelectorConfig
from .const import CONF_REFERRER, DOMAIN, SECTION_API_KEY_OPTIONS
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(SECTION_API_KEY_OPTIONS): section(
vol.Schema({vol.Optional(CONF_REFERRER): str}), {"collapsed": True}
),
}
)
async def _validate_input(
user_input: dict[str, Any],
api: GoogleWeatherApi,
errors: dict[str, str],
description_placeholders: dict[str, str],
) -> bool:
try:
await api.async_get_current_conditions(
latitude=user_input[CONF_LOCATION][CONF_LATITUDE],
longitude=user_input[CONF_LOCATION][CONF_LONGITUDE],
)
except GoogleWeatherApiError as err:
errors["base"] = "cannot_connect"
description_placeholders["error_message"] = str(err)
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return True
return False
def _get_location_schema(hass: HomeAssistant) -> vol.Schema:
"""Return the schema for a location with default values from the hass config."""
return vol.Schema(
{
vol.Required(CONF_NAME, default=hass.config.location_name): str,
vol.Required(
CONF_LOCATION,
default={
CONF_LATITUDE: hass.config.latitude,
CONF_LONGITUDE: hass.config.longitude,
},
): LocationSelector(LocationSelectorConfig(radius=False)),
}
)
def _is_location_already_configured(
hass: HomeAssistant, new_data: dict[str, float], epsilon: float = 1e-4
) -> bool:
"""Check if the location is already configured."""
for entry in hass.config_entries.async_entries(DOMAIN):
for subentry in entry.subentries.values():
# A more accurate way is to use the haversine formula, but for simplicity
# we use a simple distance check. The epsilon value is small anyway.
# This is mostly to capture cases where the user has slightly moved the location pin.
if (
abs(subentry.data[CONF_LATITUDE] - new_data[CONF_LATITUDE]) <= epsilon
and abs(subentry.data[CONF_LONGITUDE] - new_data[CONF_LONGITUDE])
<= epsilon
):
return True
return False
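The comment in _is_location_already_configured above notes that the haversine formula would be a more accurate proximity test than the simple coordinate-delta check. A self-contained sketch of that alternative; the 100 m threshold is an arbitrary illustration, not a value from the integration:

from math import asin, cos, radians, sin, sqrt

def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Great-circle distance between two points, in kilometres."""
    rlat1, rlon1, rlat2, rlon2 = map(radians, (lat1, lon1, lat2, lon2))
    a = (
        sin((rlat2 - rlat1) / 2) ** 2
        + cos(rlat1) * cos(rlat2) * sin((rlon2 - rlon1) / 2) ** 2
    )
    return 2 * 6371.0 * asin(sqrt(a))

def is_same_location(
    lat1: float, lon1: float, lat2: float, lon2: float, threshold_km: float = 0.1
) -> bool:
    return haversine_km(lat1, lon1, lat2, lon2) <= threshold_km

print(is_same_location(52.3702, 4.8952, 52.3703, 4.8953))  # True, about 13 m apart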
class GoogleWeatherConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Google Weather."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {
"api_key_url": "https://developers.google.com/maps/documentation/weather/get-api-key",
"restricting_api_keys_url": "https://developers.google.com/maps/api-security-best-practices#restricting-api-keys",
}
if user_input is not None:
api_key = user_input[CONF_API_KEY]
referrer = user_input.get(SECTION_API_KEY_OPTIONS, {}).get(CONF_REFERRER)
self._async_abort_entries_match({CONF_API_KEY: api_key})
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
return self.async_abort(reason="already_configured")
api = GoogleWeatherApi(
session=async_get_clientsession(self.hass),
api_key=api_key,
referrer=referrer,
language_code=self.hass.config.language,
)
if await _validate_input(user_input, api, errors, description_placeholders):
return self.async_create_entry(
title="Google Weather",
data={
CONF_API_KEY: api_key,
CONF_REFERRER: referrer,
},
subentries=[
{
"subentry_type": "location",
"data": user_input[CONF_LOCATION],
"title": user_input[CONF_NAME],
"unique_id": None,
},
],
)
else:
user_input = {}
schema = STEP_USER_DATA_SCHEMA.schema.copy()
schema.update(_get_location_schema(self.hass).schema)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(schema), user_input
),
errors=errors,
description_placeholders=description_placeholders,
)
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {"location": LocationSubentryFlowHandler}
class LocationSubentryFlowHandler(ConfigSubentryFlow):
"""Handle a subentry flow for location."""
async def async_step_location(
self,
user_input: dict[str, Any] | None = None,
) -> SubentryFlowResult:
"""Handle the location step."""
if self._get_entry().state != ConfigEntryState.LOADED:
return self.async_abort(reason="entry_not_loaded")
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
if user_input is not None:
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
return self.async_abort(reason="already_configured")
api: GoogleWeatherApi = self._get_entry().runtime_data.api
if await _validate_input(user_input, api, errors, description_placeholders):
return self.async_create_entry(
title=user_input[CONF_NAME],
data=user_input[CONF_LOCATION],
)
else:
user_input = {}
return self.async_show_form(
step_id="location",
data_schema=self.add_suggested_values_to_schema(
_get_location_schema(self.hass), user_input
),
errors=errors,
description_placeholders=description_placeholders,
)
async_step_user = async_step_location

View File

@@ -1,8 +0,0 @@
"""Constants for the Google Weather integration."""
from typing import Final
DOMAIN = "google_weather"
SECTION_API_KEY_OPTIONS: Final = "api_key_options"
CONF_REFERRER: Final = "referrer"

View File

@@ -1,169 +0,0 @@
"""The Google Weather coordinator."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import TypeVar
from google_weather_api import (
CurrentConditionsResponse,
DailyForecastResponse,
GoogleWeatherApi,
GoogleWeatherApiError,
HourlyForecastResponse,
)
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import (
TimestampDataUpdateCoordinator,
UpdateFailed,
)
_LOGGER = logging.getLogger(__name__)
T = TypeVar(
"T",
bound=(
CurrentConditionsResponse
| DailyForecastResponse
| HourlyForecastResponse
| None
),
)
@dataclass
class GoogleWeatherSubEntryRuntimeData:
"""Runtime data for a Google Weather sub-entry."""
coordinator_observation: GoogleWeatherCurrentConditionsCoordinator
coordinator_daily_forecast: GoogleWeatherDailyForecastCoordinator
coordinator_hourly_forecast: GoogleWeatherHourlyForecastCoordinator
@dataclass
class GoogleWeatherRuntimeData:
"""Runtime data for the Google Weather integration."""
api: GoogleWeatherApi
subentries_runtime_data: dict[str, GoogleWeatherSubEntryRuntimeData]
type GoogleWeatherConfigEntry = ConfigEntry[GoogleWeatherRuntimeData]
class GoogleWeatherBaseCoordinator(TimestampDataUpdateCoordinator[T]):
"""Base class for Google Weather coordinators."""
config_entry: GoogleWeatherConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: GoogleWeatherConfigEntry,
subentry: ConfigSubentry,
data_type_name: str,
update_interval: timedelta,
api_method: Callable[..., Awaitable[T]],
) -> None:
"""Initialize the data updater."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"Google Weather {data_type_name} coordinator for {subentry.title}",
update_interval=update_interval,
)
self.subentry = subentry
self._data_type_name = data_type_name
self._api_method = api_method
async def _async_update_data(self) -> T:
"""Fetch data from API and handle errors."""
try:
return await self._api_method(
self.subentry.data[CONF_LATITUDE],
self.subentry.data[CONF_LONGITUDE],
)
except GoogleWeatherApiError as err:
_LOGGER.error(
"Error fetching %s for %s: %s",
self._data_type_name,
self.subentry.title,
err,
)
raise UpdateFailed(f"Error fetching {self._data_type_name}") from err
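GoogleWeatherBaseCoordinator above shares one _async_update_data implementation across response types by binding the API method and parameterizing over T. A stripped-down sketch of the same shape outside Home Assistant (BaseCoordinator, UpdateFailed, and the fake API call are stand-ins, not the HA classes):

import asyncio
from collections.abc import Awaitable, Callable
from typing import Generic, TypeVar

T = TypeVar("T")

class UpdateFailed(Exception):
    """Stand-in for the HA UpdateFailed exception."""

class BaseCoordinator(Generic[T]):
    """One update path shared by all response types, like the base coordinator above."""

    def __init__(self, name: str, api_method: Callable[[float, float], Awaitable[T]]) -> None:
        self.name = name
        self._api_method = api_method
        self.data: T | None = None

    async def refresh(self, latitude: float, longitude: float) -> None:
        try:
            self.data = await self._api_method(latitude, longitude)
        except OSError as err:
            raise UpdateFailed(f"Error fetching {self.name}") from err

async def fake_current_conditions(lat: float, lon: float) -> dict[str, float]:
    """Pretend API call; the real coordinators bind GoogleWeatherApi methods here."""
    return {"temperature_c": 21.5}

async def main() -> None:
    coordinator = BaseCoordinator("current conditions", fake_current_conditions)
    await coordinator.refresh(52.37, 4.90)
    print(coordinator.data)  # {'temperature_c': 21.5}

asyncio.run(main())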
class GoogleWeatherCurrentConditionsCoordinator(
GoogleWeatherBaseCoordinator[CurrentConditionsResponse]
):
"""Handle fetching current weather conditions."""
def __init__(
self,
hass: HomeAssistant,
config_entry: GoogleWeatherConfigEntry,
subentry: ConfigSubentry,
api: GoogleWeatherApi,
) -> None:
"""Initialize the data updater."""
super().__init__(
hass,
config_entry,
subentry,
"current weather conditions",
timedelta(minutes=15),
api.async_get_current_conditions,
)
class GoogleWeatherDailyForecastCoordinator(
GoogleWeatherBaseCoordinator[DailyForecastResponse]
):
"""Handle fetching daily weather forecast."""
def __init__(
self,
hass: HomeAssistant,
config_entry: GoogleWeatherConfigEntry,
subentry: ConfigSubentry,
api: GoogleWeatherApi,
) -> None:
"""Initialize the data updater."""
super().__init__(
hass,
config_entry,
subentry,
"daily weather forecast",
timedelta(hours=1),
api.async_get_daily_forecast,
)
class GoogleWeatherHourlyForecastCoordinator(
GoogleWeatherBaseCoordinator[HourlyForecastResponse]
):
"""Handle fetching hourly weather forecast."""
def __init__(
self,
hass: HomeAssistant,
config_entry: GoogleWeatherConfigEntry,
subentry: ConfigSubentry,
api: GoogleWeatherApi,
) -> None:
"""Initialize the data updater."""
super().__init__(
hass,
config_entry,
subentry,
"hourly weather forecast",
timedelta(hours=1),
api.async_get_hourly_forecast,
)

View File

@@ -1,28 +0,0 @@
"""Base entity for Google Weather."""
from __future__ import annotations
from homeassistant.config_entries import ConfigSubentry
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
from .coordinator import GoogleWeatherConfigEntry
class GoogleWeatherBaseEntity(Entity):
"""Base entity for all Google Weather entities."""
_attr_has_entity_name = True
def __init__(
self, config_entry: GoogleWeatherConfigEntry, subentry: ConfigSubentry
) -> None:
"""Initialize base entity."""
self._attr_unique_id = subentry.subentry_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, subentry.subentry_id)},
name=subentry.title,
manufacturer="Google",
entry_type=DeviceEntryType.SERVICE,
)

View File

@@ -1,12 +0,0 @@
{
"domain": "google_weather",
"name": "Google Weather",
"codeowners": ["@tronikos"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/google_weather",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["google_weather_api"],
"quality_scale": "bronze",
"requirements": ["python-google-weather-api==0.0.4"]
}

View File

@@ -1,82 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: No actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: No events subscribed.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: No actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No configuration options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: No discovery.
discovery:
status: exempt
comment: No discovery.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: No physical devices.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: N/A
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: No repairs.
stale-devices:
status: exempt
comment: N/A
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -1,65 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
},
"error": {
"cannot_connect": "Unable to connect to the Google Weather API:\n\n{error_message}",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"location": "[%key:common::config_flow::data::location%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"api_key": "A unique alphanumeric string that associates your Google billing account with Google Weather API",
"location": "Location coordinates",
"name": "Location name"
},
"description": "Get your API key from [here]({api_key_url}).",
"sections": {
"api_key_options": {
"data": {
"referrer": "HTTP referrer"
},
"data_description": {
"referrer": "Specify this only if the API key has a [website application restriction]({restricting_api_keys_url})"
},
"name": "Optional API key options"
}
}
}
}
},
"config_subentries": {
"location": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
"entry_not_loaded": "Cannot add things while the configuration is disabled."
},
"entry_type": "Location",
"error": {
"cannot_connect": "[%key:component::google_weather::config::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {
"user": "Add location"
},
"step": {
"location": {
"data": {
"location": "[%key:common::config_flow::data::location%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"location": "[%key:component::google_weather::config::step::user::data_description::location%]",
"name": "[%key:component::google_weather::config::step::user::data_description::name%]"
}
}
}
}
}
}

View File

@@ -1,366 +0,0 @@
"""Weather entity."""
from __future__ import annotations
from google_weather_api import (
DailyForecastResponse,
HourlyForecastResponse,
WeatherCondition,
)
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_CLOUDY,
ATTR_CONDITION_HAIL,
ATTR_CONDITION_LIGHTNING_RAINY,
ATTR_CONDITION_PARTLYCLOUDY,
ATTR_CONDITION_POURING,
ATTR_CONDITION_RAINY,
ATTR_CONDITION_SNOWY,
ATTR_CONDITION_SNOWY_RAINY,
ATTR_CONDITION_SUNNY,
ATTR_CONDITION_WINDY,
ATTR_FORECAST_CLOUD_COVERAGE,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_HUMIDITY,
ATTR_FORECAST_IS_DAYTIME,
ATTR_FORECAST_NATIVE_APPARENT_TEMP,
ATTR_FORECAST_NATIVE_DEW_POINT,
ATTR_FORECAST_NATIVE_PRECIPITATION,
ATTR_FORECAST_NATIVE_PRESSURE,
ATTR_FORECAST_NATIVE_TEMP,
ATTR_FORECAST_NATIVE_TEMP_LOW,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
ATTR_FORECAST_NATIVE_WIND_SPEED,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TIME,
ATTR_FORECAST_UV_INDEX,
ATTR_FORECAST_WIND_BEARING,
CoordinatorWeatherEntity,
Forecast,
WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import (
UnitOfLength,
UnitOfPrecipitationDepth,
UnitOfPressure,
UnitOfSpeed,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import (
GoogleWeatherConfigEntry,
GoogleWeatherCurrentConditionsCoordinator,
GoogleWeatherDailyForecastCoordinator,
GoogleWeatherHourlyForecastCoordinator,
)
from .entity import GoogleWeatherBaseEntity
PARALLEL_UPDATES = 0
# Maps https://developers.google.com/maps/documentation/weather/weather-condition-icons
# to https://developers.home-assistant.io/docs/core/entity/weather/#recommended-values-for-state-and-condition
_CONDITION_MAP: dict[WeatherCondition.Type, str | None] = {
WeatherCondition.Type.TYPE_UNSPECIFIED: None,
WeatherCondition.Type.CLEAR: ATTR_CONDITION_SUNNY,
WeatherCondition.Type.MOSTLY_CLEAR: ATTR_CONDITION_PARTLYCLOUDY,
WeatherCondition.Type.PARTLY_CLOUDY: ATTR_CONDITION_PARTLYCLOUDY,
WeatherCondition.Type.MOSTLY_CLOUDY: ATTR_CONDITION_CLOUDY,
WeatherCondition.Type.CLOUDY: ATTR_CONDITION_CLOUDY,
WeatherCondition.Type.WINDY: ATTR_CONDITION_WINDY,
WeatherCondition.Type.WIND_AND_RAIN: ATTR_CONDITION_RAINY,
WeatherCondition.Type.LIGHT_RAIN_SHOWERS: ATTR_CONDITION_RAINY,
WeatherCondition.Type.CHANCE_OF_SHOWERS: ATTR_CONDITION_RAINY,
WeatherCondition.Type.SCATTERED_SHOWERS: ATTR_CONDITION_RAINY,
WeatherCondition.Type.RAIN_SHOWERS: ATTR_CONDITION_RAINY,
WeatherCondition.Type.HEAVY_RAIN_SHOWERS: ATTR_CONDITION_POURING,
WeatherCondition.Type.LIGHT_TO_MODERATE_RAIN: ATTR_CONDITION_RAINY,
WeatherCondition.Type.MODERATE_TO_HEAVY_RAIN: ATTR_CONDITION_POURING,
WeatherCondition.Type.RAIN: ATTR_CONDITION_RAINY,
WeatherCondition.Type.LIGHT_RAIN: ATTR_CONDITION_RAINY,
WeatherCondition.Type.HEAVY_RAIN: ATTR_CONDITION_POURING,
WeatherCondition.Type.RAIN_PERIODICALLY_HEAVY: ATTR_CONDITION_POURING,
WeatherCondition.Type.LIGHT_SNOW_SHOWERS: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.CHANCE_OF_SNOW_SHOWERS: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.SCATTERED_SNOW_SHOWERS: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.SNOW_SHOWERS: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.HEAVY_SNOW_SHOWERS: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.LIGHT_TO_MODERATE_SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.MODERATE_TO_HEAVY_SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.LIGHT_SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.HEAVY_SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.SNOWSTORM: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.SNOW_PERIODICALLY_HEAVY: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.HEAVY_SNOW_STORM: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.BLOWING_SNOW: ATTR_CONDITION_SNOWY,
WeatherCondition.Type.RAIN_AND_SNOW: ATTR_CONDITION_SNOWY_RAINY,
WeatherCondition.Type.HAIL: ATTR_CONDITION_HAIL,
WeatherCondition.Type.HAIL_SHOWERS: ATTR_CONDITION_HAIL,
WeatherCondition.Type.THUNDERSTORM: ATTR_CONDITION_LIGHTNING_RAINY,
WeatherCondition.Type.THUNDERSHOWER: ATTR_CONDITION_LIGHTNING_RAINY,
WeatherCondition.Type.LIGHT_THUNDERSTORM_RAIN: ATTR_CONDITION_LIGHTNING_RAINY,
WeatherCondition.Type.SCATTERED_THUNDERSTORMS: ATTR_CONDITION_LIGHTNING_RAINY,
WeatherCondition.Type.HEAVY_THUNDERSTORM: ATTR_CONDITION_LIGHTNING_RAINY,
}
def _get_condition(
api_condition: WeatherCondition.Type, is_daytime: bool
) -> str | None:
"""Map Google Weather condition to Home Assistant condition."""
cond = _CONDITION_MAP[api_condition]
if cond == ATTR_CONDITION_SUNNY and not is_daytime:
return ATTR_CONDITION_CLEAR_NIGHT
return cond
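_get_condition above maps Google's weather-condition enum onto Home Assistant condition strings and special-cases clear sky at night. A tiny sketch of the same mapping with plain strings in place of the WeatherCondition enum:

_CONDITION_MAP = {
    "CLEAR": "sunny",
    "MOSTLY_CLOUDY": "cloudy",
    "RAIN": "rainy",
}

def get_condition(api_condition: str, is_daytime: bool) -> str | None:
    """Map an API condition name to an HA condition, using clear-night after dark."""
    cond = _CONDITION_MAP.get(api_condition)
    if cond == "sunny" and not is_daytime:
        return "clear-night"
    return cond

print(get_condition("CLEAR", is_daytime=False))  # clear-night
print(get_condition("RAIN", is_daytime=True))    # rainy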
async def async_setup_entry(
hass: HomeAssistant,
entry: GoogleWeatherConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add a weather entity from a config_entry."""
for subentry in entry.subentries.values():
async_add_entities(
[GoogleWeatherEntity(entry, subentry)],
config_subentry_id=subentry.subentry_id,
)
class GoogleWeatherEntity(
CoordinatorWeatherEntity[
GoogleWeatherCurrentConditionsCoordinator,
GoogleWeatherDailyForecastCoordinator,
GoogleWeatherHourlyForecastCoordinator,
GoogleWeatherDailyForecastCoordinator,
],
GoogleWeatherBaseEntity,
):
"""Representation of a Google Weather entity."""
_attr_attribution = "Data from Google Weather"
_attr_native_temperature_unit = UnitOfTemperature.CELSIUS
_attr_native_pressure_unit = UnitOfPressure.MBAR
_attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
_attr_native_visibility_unit = UnitOfLength.KILOMETERS
_attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
_attr_name = None
_attr_supported_features = (
WeatherEntityFeature.FORECAST_DAILY
| WeatherEntityFeature.FORECAST_HOURLY
| WeatherEntityFeature.FORECAST_TWICE_DAILY
)
def __init__(
self,
entry: GoogleWeatherConfigEntry,
subentry: ConfigSubentry,
) -> None:
"""Initialize the weather entity."""
subentry_runtime_data = entry.runtime_data.subentries_runtime_data[
subentry.subentry_id
]
super().__init__(
observation_coordinator=subentry_runtime_data.coordinator_observation,
daily_coordinator=subentry_runtime_data.coordinator_daily_forecast,
hourly_coordinator=subentry_runtime_data.coordinator_hourly_forecast,
twice_daily_coordinator=subentry_runtime_data.coordinator_daily_forecast,
)
GoogleWeatherBaseEntity.__init__(self, entry, subentry)
@property
def condition(self) -> str | None:
"""Return the current condition."""
return _get_condition(
self.coordinator.data.weather_condition.type,
self.coordinator.data.is_daytime,
)
@property
def native_temperature(self) -> float:
"""Return the temperature."""
return self.coordinator.data.temperature.degrees
@property
def native_apparent_temperature(self) -> float:
"""Return the apparent temperature."""
return self.coordinator.data.feels_like_temperature.degrees
@property
def native_dew_point(self) -> float:
"""Return the dew point."""
return self.coordinator.data.dew_point.degrees
@property
def humidity(self) -> int:
"""Return the humidity."""
return self.coordinator.data.relative_humidity
@property
def uv_index(self) -> float:
"""Return the UV index."""
return float(self.coordinator.data.uv_index)
@property
def native_pressure(self) -> float:
"""Return the pressure."""
return self.coordinator.data.air_pressure.mean_sea_level_millibars
@property
def native_wind_gust_speed(self) -> float:
"""Return the wind gust speed."""
return self.coordinator.data.wind.gust.value
@property
def native_wind_speed(self) -> float:
"""Return the wind speed."""
return self.coordinator.data.wind.speed.value
@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
return self.coordinator.data.wind.direction.degrees
@property
def native_visibility(self) -> float:
"""Return the visibility."""
return self.coordinator.data.visibility.distance
@property
def cloud_coverage(self) -> float:
"""Return the Cloud coverage in %."""
return float(self.coordinator.data.cloud_cover)
@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
coordinator = self.forecast_coordinators["daily"]
assert coordinator
daily_data = coordinator.data
assert isinstance(daily_data, DailyForecastResponse)
return [
{
ATTR_FORECAST_CONDITION: _get_condition(
item.daytime_forecast.weather_condition.type, is_daytime=True
),
ATTR_FORECAST_TIME: item.interval.start_time,
ATTR_FORECAST_HUMIDITY: item.daytime_forecast.relative_humidity,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: max(
item.daytime_forecast.precipitation.probability.percent,
item.nighttime_forecast.precipitation.probability.percent,
),
ATTR_FORECAST_CLOUD_COVERAGE: item.daytime_forecast.cloud_cover,
ATTR_FORECAST_NATIVE_PRECIPITATION: (
item.daytime_forecast.precipitation.qpf.quantity
+ item.nighttime_forecast.precipitation.qpf.quantity
),
ATTR_FORECAST_NATIVE_TEMP: item.max_temperature.degrees,
ATTR_FORECAST_NATIVE_TEMP_LOW: item.min_temperature.degrees,
ATTR_FORECAST_NATIVE_APPARENT_TEMP: (
item.feels_like_max_temperature.degrees
),
ATTR_FORECAST_WIND_BEARING: item.daytime_forecast.wind.direction.degrees,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: max(
item.daytime_forecast.wind.gust.value,
item.nighttime_forecast.wind.gust.value,
),
ATTR_FORECAST_NATIVE_WIND_SPEED: max(
item.daytime_forecast.wind.speed.value,
item.nighttime_forecast.wind.speed.value,
),
ATTR_FORECAST_UV_INDEX: item.daytime_forecast.uv_index,
}
for item in daily_data.forecast_days
]
@callback
def _async_forecast_hourly(self) -> list[Forecast] | None:
"""Return the hourly forecast in native units."""
coordinator = self.forecast_coordinators["hourly"]
assert coordinator
hourly_data = coordinator.data
assert isinstance(hourly_data, HourlyForecastResponse)
return [
{
ATTR_FORECAST_CONDITION: _get_condition(
item.weather_condition.type, item.is_daytime
),
ATTR_FORECAST_TIME: item.interval.start_time,
ATTR_FORECAST_HUMIDITY: item.relative_humidity,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: item.precipitation.probability.percent,
ATTR_FORECAST_CLOUD_COVERAGE: item.cloud_cover,
ATTR_FORECAST_NATIVE_PRECIPITATION: item.precipitation.qpf.quantity,
ATTR_FORECAST_NATIVE_PRESSURE: item.air_pressure.mean_sea_level_millibars,
ATTR_FORECAST_NATIVE_TEMP: item.temperature.degrees,
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item.feels_like_temperature.degrees,
ATTR_FORECAST_WIND_BEARING: item.wind.direction.degrees,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item.wind.gust.value,
ATTR_FORECAST_NATIVE_WIND_SPEED: item.wind.speed.value,
ATTR_FORECAST_NATIVE_DEW_POINT: item.dew_point.degrees,
ATTR_FORECAST_UV_INDEX: item.uv_index,
ATTR_FORECAST_IS_DAYTIME: item.is_daytime,
}
for item in hourly_data.forecast_hours
]
@callback
def _async_forecast_twice_daily(self) -> list[Forecast] | None:
"""Return the twice daily forecast in native units."""
coordinator = self.forecast_coordinators["twice_daily"]
assert coordinator
daily_data = coordinator.data
assert isinstance(daily_data, DailyForecastResponse)
forecasts: list[Forecast] = []
for item in daily_data.forecast_days:
# Process daytime forecast
day_forecast = item.daytime_forecast
forecasts.append(
{
ATTR_FORECAST_CONDITION: _get_condition(
day_forecast.weather_condition.type, is_daytime=True
),
ATTR_FORECAST_TIME: day_forecast.interval.start_time,
ATTR_FORECAST_HUMIDITY: day_forecast.relative_humidity,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: day_forecast.precipitation.probability.percent,
ATTR_FORECAST_CLOUD_COVERAGE: day_forecast.cloud_cover,
ATTR_FORECAST_NATIVE_PRECIPITATION: day_forecast.precipitation.qpf.quantity,
ATTR_FORECAST_NATIVE_TEMP: item.max_temperature.degrees,
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item.feels_like_max_temperature.degrees,
ATTR_FORECAST_WIND_BEARING: day_forecast.wind.direction.degrees,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: day_forecast.wind.gust.value,
ATTR_FORECAST_NATIVE_WIND_SPEED: day_forecast.wind.speed.value,
ATTR_FORECAST_UV_INDEX: day_forecast.uv_index,
ATTR_FORECAST_IS_DAYTIME: True,
}
)
# Process nighttime forecast
night_forecast = item.nighttime_forecast
forecasts.append(
{
ATTR_FORECAST_CONDITION: _get_condition(
night_forecast.weather_condition.type, is_daytime=False
),
ATTR_FORECAST_TIME: night_forecast.interval.start_time,
ATTR_FORECAST_HUMIDITY: night_forecast.relative_humidity,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: night_forecast.precipitation.probability.percent,
ATTR_FORECAST_CLOUD_COVERAGE: night_forecast.cloud_cover,
ATTR_FORECAST_NATIVE_PRECIPITATION: night_forecast.precipitation.qpf.quantity,
ATTR_FORECAST_NATIVE_TEMP: item.min_temperature.degrees,
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item.feels_like_min_temperature.degrees,
ATTR_FORECAST_WIND_BEARING: night_forecast.wind.direction.degrees,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: night_forecast.wind.gust.value,
ATTR_FORECAST_NATIVE_WIND_SPEED: night_forecast.wind.speed.value,
ATTR_FORECAST_UV_INDEX: night_forecast.uv_index,
ATTR_FORECAST_IS_DAYTIME: False,
}
)
return forecasts

View File

@@ -0,0 +1,77 @@
"""Support for the Hive alarm."""
from __future__ import annotations
from datetime import timedelta
from homeassistant.components.alarm_control_panel import (
AlarmControlPanelEntity,
AlarmControlPanelEntityFeature,
AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HiveConfigEntry
from .entity import HiveEntity
PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=15)
HIVETOHA = {
"home": AlarmControlPanelState.DISARMED,
"asleep": AlarmControlPanelState.ARMED_NIGHT,
"away": AlarmControlPanelState.ARMED_AWAY,
"sos": AlarmControlPanelState.TRIGGERED,
}
async def async_setup_entry(
hass: HomeAssistant,
entry: HiveConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hive thermostat based on a config entry."""
hive = entry.runtime_data
if devices := hive.session.deviceList.get("alarm_control_panel"):
async_add_entities(
[HiveAlarmControlPanelEntity(hive, dev) for dev in devices], True
)
class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
"""Representation of a Hive alarm."""
_attr_supported_features = (
AlarmControlPanelEntityFeature.ARM_NIGHT
| AlarmControlPanelEntityFeature.ARM_AWAY
| AlarmControlPanelEntityFeature.TRIGGER
)
_attr_code_arm_required = False
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
await self.hive.alarm.setMode(self.device, "home")
async def async_alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm night command."""
await self.hive.alarm.setMode(self.device, "asleep")
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
await self.hive.alarm.setMode(self.device, "away")
async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send alarm trigger command."""
await self.hive.alarm.setMode(self.device, "sos")
async def async_update(self) -> None:
"""Update all Node data from Hive."""
await self.hive.session.updateData(self.device)
self.device = await self.hive.alarm.getAlarm(self.device)
self._attr_available = self.device["deviceData"].get("online")
if self._attr_available:
if self.device["status"]["state"]:
self._attr_alarm_state = AlarmControlPanelState.TRIGGERED
else:
self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]]

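As a rough illustration of how async_update above maps a device payload onto an alarm state, here is a minimal, self-contained sketch; the device dict and the plain-string states are hypothetical stand-ins for the real Hive payload and the AlarmControlPanelState enum.

# Hypothetical Hive payload, shaped like the dict read in async_update above.
device = {
    "deviceData": {"online": True},
    "status": {"state": False, "mode": "asleep"},
}

# Plain-string stand-ins for the AlarmControlPanelState members used in HIVETOHA.
HIVETOHA = {
    "home": "disarmed",
    "asleep": "armed_night",
    "away": "armed_away",
    "sos": "triggered",
}

available = device["deviceData"].get("online")
alarm_state = None
if available:
    # An active "state" flag wins over the configured mode.
    alarm_state = "triggered" if device["status"]["state"] else HIVETOHA[device["status"]["mode"]]
print(available, alarm_state)  # True armed_night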
View File

@@ -11,6 +11,7 @@ CONFIG_ENTRY_VERSION = 1
DEFAULT_NAME = "Hive"
DOMAIN = "hive"
PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.LIGHT,
@@ -19,6 +20,7 @@ PLATFORMS = [
Platform.WATER_HEATER,
]
PLATFORM_LOOKUP = {
Platform.ALARM_CONTROL_PANEL: "alarm_control_panel",
Platform.BINARY_SENSOR: "binary_sensor",
Platform.CLIMATE: "climate",
Platform.LIGHT: "light",

View File

@@ -9,5 +9,5 @@
},
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.7"]
"requirements": ["pyhive-integration==1.0.6"]
}

View File

@@ -7,7 +7,7 @@ from collections import defaultdict
from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import Any
from typing import Any, cast
from aiohomeconnect.client import Client as HomeConnectClient
from aiohomeconnect.model import (
@@ -247,15 +247,14 @@ class HomeConnectCoordinator(
value=event.value,
)
else:
event_value = event.value
if event_key in (
EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
) and isinstance(event_value, str):
):
await self.update_options(
event_message_ha_id,
event_key,
ProgramKey(event_value),
ProgramKey(cast(str, event.value)),
)
events[event_key] = event
self._call_event_listener(event_message)

View File

@@ -14,6 +14,7 @@ from aiohomeconnect.model.error import (
TooManyRequestsError,
)
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
@@ -61,8 +62,10 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.update_native_value()
available = self._attr_available = self.appliance.info.connected
self.async_write_ha_state()
_LOGGER.debug("Updated %s", self)
state = STATE_UNAVAILABLE if not available else self.state
_LOGGER.debug("Updated %s, new state: %s", self.entity_id, state)
@property
def bsh_key(self) -> str:
@@ -77,7 +80,7 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
as event updates should take precedence over the coordinator
refresh.
"""
return self.appliance.info.connected and self._attr_available
return self._attr_available
class HomeConnectOptionEntity(HomeConnectEntity):

View File

@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.23.1"],
"requirements": ["aiohomeconnect==0.23.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -412,8 +412,8 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
"""Set the program value."""
event = self.appliance.events.get(cast(EventKey, self.bsh_key))
self._attr_current_option = (
PROGRAMS_TRANSLATION_KEYS_MAP.get(ProgramKey(event_value))
if event and isinstance(event_value := event.value, str)
PROGRAMS_TRANSLATION_KEYS_MAP.get(cast(ProgramKey, event.value))
if event
else None
)

View File

@@ -556,11 +556,8 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
status = self.appliance.status[cast(StatusKey, self.bsh_key)].value
self._update_native_value(status)
def _update_native_value(self, status: str | float | None) -> None:
def _update_native_value(self, status: str | float) -> None:
"""Set the value of the sensor based on the given value."""
if status is None:
self._attr_native_value = None
return
match self.device_class:
case SensorDeviceClass.TIMESTAMP:
self._attr_native_value = dt_util.utcnow() + timedelta(

View File

@@ -1237,7 +1237,7 @@
"message": "Error obtaining data from the API: {error}"
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
"message": "OAuth2 implementation temporarily unavailable, will retry"
},
"pause_program": {
"message": "Error pausing program: {error}"

View File

@@ -76,18 +76,9 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""
context: ConfigFlowContext
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
ZIGBEE_BAUDRATE = 460800
# Early ZBT-2 samples used RTS/DTR to trigger the bootloader, later ones use the
# baudrate method. Since the two are mutually exclusive we just use both.
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
APPLICATION_PROBE_METHODS = [
(ApplicationType.GECKO_BOOTLOADER, 115200),
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
(ApplicationType.SPINEL, 460800),
]
async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:

View File

@@ -6,12 +6,6 @@
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
"documentation": "https://www.home-assistant.io/integrations/homeassistant_connect_zbt2",
"integration_type": "hardware",
"loggers": [
"bellows",
"universal_silabs_flasher",
"zigpy.serial",
"serial_asyncio_fast"
],
"quality_scale": "bronze",
"usb": [
{

View File

@@ -14,6 +14,7 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.const import EntityCategory
@@ -23,7 +24,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HomeAssistantConnectZBT2ConfigEntry
from .config_flow import ZBT2FirmwareMixin
from .const import DOMAIN, FIRMWARE, FIRMWARE_VERSION, HARDWARE_NAME, SERIAL_NUMBER
_LOGGER = logging.getLogger(__name__)
@@ -134,8 +134,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Connect ZBT-2 firmware update entity."""
BOOTLOADER_RESET_METHODS = ZBT2FirmwareMixin.BOOTLOADER_RESET_METHODS
APPLICATION_PROBE_METHODS = ZBT2FirmwareMixin.APPLICATION_PROBE_METHODS
bootloader_reset_methods = [ResetTarget.RTS_DTR]
def __init__(
self,

View File

@@ -81,7 +81,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
BOOTLOADER_RESET_METHODS: list[ResetTarget] = [] # Default, subclasses may override
APPLICATION_PROBE_METHODS: list[tuple[ApplicationType, int]] = []
_picked_firmware_type: PickedFirmwareType
_zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED
@@ -231,11 +230,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
# Installing new firmware is only truly required if the wrong type is
# installed: upgrading to the latest release of the current firmware type
# isn't strictly necessary for functionality.
self._probed_firmware_info = await probe_silabs_firmware_info(
self._device,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
)
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)
firmware_install_required = self._probed_firmware_info is None or (
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
@@ -300,7 +295,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
fw_data=fw_data,
expected_installed_firmware_type=expected_installed_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
progress_callback=lambda offset, total: self.async_update_progress(
offset / total
),

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.1.0",
"universal-silabs-flasher==0.0.37",
"ha-silabs-firmware-client==0.3.0"
]
}

View File

@@ -86,8 +86,7 @@ class BaseFirmwareUpdateEntity(
# Subclasses provide the mapping between firmware types and entity descriptions
entity_description: FirmwareUpdateEntityDescription
BOOTLOADER_RESET_METHODS: list[ResetTarget]
APPLICATION_PROBE_METHODS: list[tuple[ApplicationType, int]]
bootloader_reset_methods: list[ResetTarget] = []
_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
@@ -279,8 +278,7 @@ class BaseFirmwareUpdateEntity(
device=self._current_device,
fw_data=fw_data,
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
bootloader_reset_methods=self.bootloader_reset_methods,
progress_callback=self._update_progress,
domain=self._config_entry.domain,
)

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
import asyncio
from collections import defaultdict
from collections.abc import AsyncIterator, Callable, Sequence
from collections.abc import AsyncIterator, Callable, Iterable, Sequence
from contextlib import AsyncExitStack, asynccontextmanager
from dataclasses import dataclass
from enum import StrEnum
@@ -309,20 +309,15 @@ async def guess_firmware_info(hass: HomeAssistant, device_path: str) -> Firmware
async def probe_silabs_firmware_info(
device: str,
*,
bootloader_reset_methods: Sequence[ResetTarget],
application_probe_methods: Sequence[tuple[ApplicationType, int]],
device: str, *, probe_methods: Iterable[ApplicationType] | None = None
) -> FirmwareInfo | None:
"""Probe the running firmware on a SiLabs device."""
flasher = Flasher(
device=device,
probe_methods=tuple(
(m.as_flasher_application_type(), baudrate)
for m, baudrate in application_probe_methods
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
**(
{"probe_methods": [m.as_flasher_application_type() for m in probe_methods]}
if probe_methods
else {}
),
)
@@ -348,18 +343,11 @@ async def probe_silabs_firmware_info(
async def probe_silabs_firmware_type(
device: str,
*,
bootloader_reset_methods: Sequence[ResetTarget],
application_probe_methods: Sequence[tuple[ApplicationType, int]],
device: str, *, probe_methods: Iterable[ApplicationType] | None = None
) -> ApplicationType | None:
"""Probe the running firmware type on a SiLabs device."""
fw_info = await probe_silabs_firmware_info(
device,
bootloader_reset_methods=bootloader_reset_methods,
application_probe_methods=application_probe_methods,
)
fw_info = await probe_silabs_firmware_info(device, probe_methods=probe_methods)
if fw_info is None:
return None
@@ -371,22 +359,12 @@ async def async_flash_silabs_firmware(
device: str,
fw_data: bytes,
expected_installed_firmware_type: ApplicationType,
bootloader_reset_methods: Sequence[ResetTarget],
application_probe_methods: Sequence[tuple[ApplicationType, int]],
bootloader_reset_methods: Sequence[ResetTarget] = (),
progress_callback: Callable[[int, int], None] | None = None,
*,
domain: str = DOMAIN,
) -> FirmwareInfo:
"""Flash firmware to the SiLabs device."""
if not any(
method == expected_installed_firmware_type
for method, _ in application_probe_methods
):
raise ValueError(
f"Expected installed firmware type {expected_installed_firmware_type!r}"
f" not in application probe methods {application_probe_methods!r}"
)
async with async_firmware_update_context(hass, device, domain):
firmware_info = await guess_firmware_info(hass, device)
_LOGGER.debug("Identified firmware info: %s", firmware_info)
@@ -395,9 +373,11 @@ async def async_flash_silabs_firmware(
flasher = Flasher(
device=device,
probe_methods=tuple(
(m.as_flasher_application_type(), baudrate)
for m, baudrate in application_probe_methods
probe_methods=(
ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(),
ApplicationType.EZSP.as_flasher_application_type(),
ApplicationType.SPINEL.as_flasher_application_type(),
ApplicationType.CPC.as_flasher_application_type(),
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
@@ -421,13 +401,7 @@ async def async_flash_silabs_firmware(
probed_firmware_info = await probe_silabs_firmware_info(
device,
bootloader_reset_methods=bootloader_reset_methods,
# Only probe for the expected installed firmware type
application_probe_methods=[
(method, baudrate)
for method, baudrate in application_probe_methods
if method == expected_installed_firmware_type
],
probe_methods=(expected_installed_firmware_type,),
)
if probed_firmware_info is None:

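For orientation, a minimal sketch of the two selection steps async_flash_silabs_firmware performs above on the (ApplicationType, baudrate) pairs: a pre-flash guard that the expected firmware type is probeable at all, and a post-flash filter that probes only for the type that was just installed. The string/baudrate tuples below are hypothetical stand-ins for the real ApplicationType members.

# Hypothetical probe list, mirroring the APPLICATION_PROBE_METHODS constants
# defined by the ZBT-2/SkyConnect/Yellow mixins elsewhere in this diff.
application_probe_methods = [
    ("gecko_bootloader", 115200),
    ("ezsp", 460800),
    ("spinel", 460800),
]
expected_installed_firmware_type = "ezsp"

# Pre-flash guard: refuse to flash if the expected type could never be probed afterwards.
if not any(m == expected_installed_firmware_type for m, _ in application_probe_methods):
    raise ValueError("expected firmware type not in application probe methods")

# Post-flash probe: only look for the firmware type that was just installed.
post_flash_methods = [
    (m, baudrate)
    for m, baudrate in application_probe_methods
    if m == expected_installed_firmware_type
]
print(post_flash_methods)  # [('ezsp', 460800)]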
View File

@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.helpers import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.usb import (
usb_service_info_from_device,
@@ -80,20 +79,6 @@ class SkyConnectFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
context: ConfigFlowContext
ZIGBEE_BAUDRATE = 115200
# There is no hardware bootloader trigger
BOOTLOADER_RESET_METHODS: list[ResetTarget] = []
APPLICATION_PROBE_METHODS = [
(ApplicationType.GECKO_BOOTLOADER, 115200),
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
(ApplicationType.SPINEL, 460800),
# CPC baudrates can be removed once multiprotocol is removed
(ApplicationType.CPC, 115200),
(ApplicationType.CPC, 230400),
(ApplicationType.CPC, 460800),
(ApplicationType.ROUTER, 115200),
]
def _get_translation_placeholders(self) -> dict[str, str]:
"""Shared translation placeholders."""
placeholders = {

View File

@@ -6,12 +6,6 @@
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
"documentation": "https://www.home-assistant.io/integrations/homeassistant_sky_connect",
"integration_type": "hardware",
"loggers": [
"bellows",
"universal_silabs_flasher",
"zigpy.serial",
"serial_asyncio_fast"
],
"usb": [
{
"description": "*skyconnect v1.0*",

View File

@@ -23,7 +23,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HomeAssistantSkyConnectConfigEntry
from .config_flow import SkyConnectFirmwareMixin
from .const import (
DOMAIN,
FIRMWARE,
@@ -152,8 +151,8 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""SkyConnect firmware update entity."""
BOOTLOADER_RESET_METHODS = SkyConnectFirmwareMixin.BOOTLOADER_RESET_METHODS
APPLICATION_PROBE_METHODS = SkyConnectFirmwareMixin.APPLICATION_PROBE_METHODS
# The ZBT-1 does not have a hardware bootloader trigger
bootloader_reset_methods = []
def __init__(
self,

View File

@@ -82,18 +82,7 @@ else:
class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Yellow firmware methods."""
ZIGBEE_BAUDRATE = 115200
BOOTLOADER_RESET_METHODS = [ResetTarget.YELLOW]
APPLICATION_PROBE_METHODS = [
(ApplicationType.GECKO_BOOTLOADER, 115200),
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
(ApplicationType.SPINEL, 460800),
# CPC baudrates can be removed once multiprotocol is removed
(ApplicationType.CPC, 115200),
(ApplicationType.CPC, 230400),
(ApplicationType.CPC, 460800),
(ApplicationType.ROUTER, 115200),
]
async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None
@@ -157,11 +146,7 @@ class HomeAssistantYellowConfigFlow(
assert self._device is not None
# We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this
self._probed_firmware_info = await probe_silabs_firmware_info(
self._device,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
)
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)
# Kick off ZHA hardware discovery automatically if Zigbee firmware is running
if (

View File

@@ -7,11 +7,5 @@
"dependencies": ["hardware", "homeassistant_hardware"],
"documentation": "https://www.home-assistant.io/integrations/homeassistant_yellow",
"integration_type": "hardware",
"loggers": [
"bellows",
"universal_silabs_flasher",
"zigpy.serial",
"serial_asyncio_fast"
],
"single_config_entry": true
}

View File

@@ -14,6 +14,7 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.const import EntityCategory
@@ -23,7 +24,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HomeAssistantYellowConfigEntry
from .config_flow import YellowFirmwareMixin
from .const import DOMAIN, FIRMWARE, FIRMWARE_VERSION, MANUFACTURER, MODEL, RADIO_DEVICE
_LOGGER = logging.getLogger(__name__)
@@ -150,8 +150,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Yellow firmware update entity."""
BOOTLOADER_RESET_METHODS = YellowFirmwareMixin.BOOTLOADER_RESET_METHODS
APPLICATION_PROBE_METHODS = YellowFirmwareMixin.APPLICATION_PROBE_METHODS
bootloader_reset_methods = [ResetTarget.YELLOW] # Triggers a GPIO reset
def __init__(
self,

View File

@@ -4,7 +4,6 @@ import logging
from typing import TYPE_CHECKING
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
@@ -17,6 +16,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
from .util import async_connect_hub
PARALLEL_UPDATES = 1

View File

@@ -8,7 +8,6 @@ import logging
from aiopvapi.helpers.aiorequest import PvApiMaintenance
from aiopvapi.hub import Hub
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.shades import Shades
from homeassistant.config_entries import ConfigEntry
@@ -16,6 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import HUB_EXCEPTIONS
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -208,13 +208,13 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity):
async def _async_execute_move(self, move: ShadePosition) -> None:
"""Execute a move that can affect multiple positions."""
_LOGGER.debug("Move request %s: %s", self.name, move)
# Store the requested positions so subsequent move
# requests contain the secondary shade positions
self.data.update_shade_position(self._shade.id, move)
async with self.coordinator.radio_operation_lock:
response = await self._shade.move(move)
_LOGGER.debug("Move response %s: %s", self.name, response)
# Process the response from the hub (including new positions)
self.data.update_shade_position(self._shade.id, response)
async def _async_set_cover_position(self, target_hass_position: int) -> None:
"""Move the shade to a position."""
target_hass_position = self._clamp_cover_limit(target_hass_position)

View File

@@ -3,7 +3,6 @@
import logging
from aiopvapi.resources.shade import BaseShade, ShadePosition
from aiopvapi.resources.shade_data import PowerviewShadeData
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
@@ -12,6 +11,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewDeviceInfo
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -18,6 +18,6 @@
},
"iot_class": "local_polling",
"loggers": ["aiopvapi"],
"requirements": ["aiopvapi==3.3.0"],
"requirements": ["aiopvapi==3.2.1"],
"zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."]
}

View File

@@ -0,0 +1,80 @@
"""Shade data for the Hunter Douglas PowerView integration."""
from __future__ import annotations
from dataclasses import fields
from typing import Any
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade import BaseShade, ShadePosition
from .util import async_map_data_by_id
POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]
def copy_position_data(source: ShadePosition, target: ShadePosition) -> None:
"""Copy the non-None position fields from source onto target."""
for field in POSITION_FIELDS:
if (value := getattr(source, field.name)) is not None:
setattr(target, field.name, value)
class PowerviewShadeData:
"""Coordinate shade data between multiple api calls."""
def __init__(self) -> None:
"""Init the shade data."""
self._raw_data_by_id: dict[int, dict[str | int, Any]] = {}
self._shade_group_data_by_id: dict[int, BaseShade] = {}
self.positions: dict[int, ShadePosition] = {}
def get_raw_data(self, shade_id: int) -> dict[str | int, Any]:
"""Get data for the shade."""
return self._raw_data_by_id[shade_id]
def get_all_raw_data(self) -> dict[int, dict[str | int, Any]]:
"""Get data for all shades."""
return self._raw_data_by_id
def get_shade(self, shade_id: int) -> BaseShade:
"""Get specific shade from the coordinator."""
return self._shade_group_data_by_id[shade_id]
def get_shade_position(self, shade_id: int) -> ShadePosition:
"""Get positions for a shade."""
if shade_id not in self.positions:
shade_position = ShadePosition()
# If we have the group data, use it to populate the initial position
if shade := self._shade_group_data_by_id.get(shade_id):
copy_position_data(shade.current_position, shade_position)
self.positions[shade_id] = shade_position
return self.positions[shade_id]
def update_from_group_data(self, shade_id: int) -> None:
"""Process an update from the group data."""
data = self._shade_group_data_by_id[shade_id]
copy_position_data(data.current_position, self.get_shade_position(data.id))
def store_group_data(self, shade_data: PowerviewData) -> None:
"""Store data from the all shades endpoint.
This does not update the shades or positions (self.positions)
as the data may be stale. update_from_group_data
with a shade_id will update a specific shade
from the group data.
"""
self._shade_group_data_by_id = shade_data.processed
self._raw_data_by_id = async_map_data_by_id(shade_data.raw)
def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
"""Update a single shades position."""
copy_position_data(new_position, self.get_shade_position(shade_id))
def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
"""Update a single shades velocity."""
# the hub will always return a velocity of 0 on initial connect,
# separate definition to store consistent value in HA
# this value is purely driven from HA
if shade_data.velocity is not None:
self.get_shade_position(shade_id).velocity = shade_data.velocity

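To make the merge semantics of copy_position_data and update_shade_position above concrete, here is a minimal runnable sketch; the Position dataclass is a simplified, hypothetical stand-in for aiopvapi's ShadePosition.

from dataclasses import dataclass


@dataclass
class Position:
    """Hypothetical, trimmed-down stand-in for aiopvapi's ShadePosition."""

    primary: int | None = None
    secondary: int | None = None
    velocity: int | None = None


def copy_position_data(source: Position, target: Position) -> None:
    """Copy only the non-None source fields onto the target (velocity is excluded)."""
    for name in ("primary", "secondary"):
        if (value := getattr(source, name)) is not None:
            setattr(target, name, value)


stored = Position(primary=30, secondary=80, velocity=50)
hub_response = Position(primary=55)  # the hub only reported the primary rail
copy_position_data(hub_response, stored)
print(stored)  # Position(primary=55, secondary=80, velocity=50)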
View File

@@ -2,15 +2,25 @@
from __future__ import annotations
from collections.abc import Iterable
from typing import Any
from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.hub import Hub
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .model import PowerviewAPI, PowerviewDeviceInfo
@callback
def async_map_data_by_id(data: Iterable[dict[str | int, Any]]):
"""Return a dict with the key being the id for a list of entries."""
return {entry[ATTR_ID]: entry for entry in data}
async def async_connect_hub(
hass: HomeAssistant, address: str, api_version: int | None = None
) -> PowerviewAPI:

View File

@@ -121,15 +121,12 @@ class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):
"""Initialize AutomowerEntity."""
super().__init__(coordinator)
self.mower_id = mower_id
model_without_manufacturer = self.mower_attributes.system.model.removeprefix(
"Husqvarna "
).removeprefix("HUSQVARNA ")
parts = model_without_manufacturer.split(maxsplit=1)
parts = self.mower_attributes.system.model.split(maxsplit=2)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, mower_id)},
manufacturer="Husqvarna",
model=parts[0].capitalize().removesuffix("®"),
model_id=parts[1],
manufacturer=parts[0],
model=parts[1],
model_id=parts[2],
name=self.mower_attributes.system.name,
serial_number=self.mower_attributes.system.serial_number,
suggested_area="Garden",

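A small sketch of the two model-string parsing approaches the hunk above switches between, run on a hypothetical model value (real values come from mower_attributes.system.model):

# Hypothetical model string for illustration only.
model = "HUSQVARNA AUTOMOWER® 430X"

# Three-way split: manufacturer, model and model_id taken directly from the string.
parts = model.split(maxsplit=2)
print(parts)  # ['HUSQVARNA', 'AUTOMOWER®', '430X']

# Prefix-stripping variant: drop the manufacturer first, then split once and
# normalise the model name.
stripped = model.removeprefix("Husqvarna ").removeprefix("HUSQVARNA ")
parts = stripped.split(maxsplit=1)
print(parts[0].capitalize().removesuffix("®"), parts[1])  # Automower 430X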
View File

@@ -13,7 +13,6 @@ from typing import Any
from aiohttp import web
from hyperion import client
from hyperion.const import (
KEY_DATA,
KEY_IMAGE,
KEY_IMAGE_STREAM,
KEY_LEDCOLORS,
@@ -156,8 +155,7 @@ class HyperionCamera(Camera):
"""Update Hyperion components."""
if not img:
return
# Prefer KEY_DATA (Hyperion server >= 2.1.1); fall back to KEY_RESULT for older server versions
img_data = img.get(KEY_DATA, img.get(KEY_RESULT, {})).get(KEY_IMAGE)
img_data = img.get(KEY_RESULT, {}).get(KEY_IMAGE)
if not img_data or not img_data.startswith(IMAGE_STREAM_JPG_SENTINEL):
return
async with self._image_cond:

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.binary_sensor import (
@@ -68,7 +69,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@property
@cached_property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from propcache.api import cached_property
from homeassistant.components.device_tracker import TrackerEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -38,12 +40,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
"""Initialize the device tracker."""
super().__init__(coordinator, license_plate, "device_tracker")
@property
@cached_property
def latitude(self) -> float | None:
"""Return latitude value of the device."""
return self.vehicle.gps_coordinates[0]
@property
@cached_property
def longitude(self) -> float | None:
"""Return longitude value of the device."""
return self.vehicle.gps_coordinates[1]

View File

@@ -6,6 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.sensor import (
@@ -132,7 +133,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@property
@cached_property
def native_value(self) -> StateType | datetime:
"""Return the state of the device."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.3"]
"requirements": ["pylamarzocco==2.1.2"]
}

View File

@@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
await self.coordinator.device.update_firmware()
while (
update_progress := await self.coordinator.device.get_firmware()
).command_status is not UpdateStatus.UPDATED:
).command_status is UpdateStatus.IN_PROGRESS:
if counter >= MAX_UPDATE_WAIT:
_raise_timeout_error()
self._attr_update_percentage = update_progress.progress_percentage

View File

@@ -94,6 +94,28 @@
}
},
"services": {
"address_to_device_id": {
"description": "Converts an LCN address into a device ID.",
"fields": {
"host": {
"description": "Host name as given in the integration panel.",
"name": "Host name"
},
"id": {
"description": "Module or group number of the target.",
"name": "Module or group ID"
},
"segment_id": {
"description": "Segment number of the target.",
"name": "Segment ID"
},
"type": {
"description": "Module type of the target.",
"name": "Type"
}
},
"name": "Address to device ID"
},
"dyn_text": {
"description": "Sends dynamic text to LCN-GTxD displays.",
"fields": {

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==11.1.0"]
"requirements": ["ical==11.0.0"]
}

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==11.1.0"]
"requirements": ["ical==11.0.0"]
}

View File

@@ -154,7 +154,6 @@ class LocalTodoListEntity(TodoListEntity):
),
due=due,
description=item.description,
completed=item.completed,
)
)
self._attr_todo_items = todo_items

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["lunatone-rest-api-client==0.5.7"]
"requirements": ["lunatone-rest-api-client==0.5.3"]
}

View File

@@ -353,13 +353,17 @@ DISCOVERY_SCHEMAS = [
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# DeviceFault or SupplyFault bit enabled
device_to_ha=lambda x: bool(
x
& (
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault
| clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault
)
),
device_to_ha={
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedHigh: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kLocalOverride: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemotePressure: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteFlow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteTemperature: False,
}.get,
),
entity_class=MatterBinarySensor,
required_attributes=(
@@ -373,9 +377,9 @@ DISCOVERY_SCHEMAS = [
key="PumpStatusRunning",
translation_key="pump_running",
device_class=BinarySensorDeviceClass.RUNNING,
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
== clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
),
),
entity_class=MatterBinarySensor,
@@ -391,8 +395,8 @@ DISCOVERY_SCHEMAS = [
translation_key="dishwasher_alarm_inflow",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
),
),
entity_class=MatterBinarySensor,
@@ -406,8 +410,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
),
),
entity_class=MatterBinarySensor,
@@ -421,10 +425,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_general_fault",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# GeneralFault bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
),
),
entity_class=MatterBinarySensor,
@@ -440,10 +443,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_blocked",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# Blocked bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
),
),
entity_class=MatterBinarySensor,
@@ -459,10 +461,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_leaking",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# Leaking bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
),
),
entity_class=MatterBinarySensor,
@@ -477,8 +478,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
device_to_ha=lambda x: (
x == clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
),
),
entity_class=MatterBinarySensor,

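As a side note, a minimal sketch (with hypothetical bit values) of why the bitwise-AND form and the equality/dict-lookup form in the device_to_ha hunks above behave differently once several bits are set at the same time:

from enum import IntFlag


class PumpStatus(IntFlag):
    """Hypothetical bit values standing in for Matter's PumpStatusBitmap."""

    DEVICE_FAULT = 0x01
    SUPPLY_FAULT = 0x02
    SPEED_LOW = 0x04


status = PumpStatus.DEVICE_FAULT | PumpStatus.SPEED_LOW

# Bitwise test: true whenever either fault bit is set, regardless of other bits.
print(bool(status & (PumpStatus.DEVICE_FAULT | PumpStatus.SUPPLY_FAULT)))  # True

# Equality/dict lookup: only matches when the value equals exactly one mapped bit.
print(status == PumpStatus.DEVICE_FAULT)             # False
print({PumpStatus.DEVICE_FAULT: True}.get(status))   # None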
View File

@@ -194,7 +194,7 @@
"name": "Altitude above sea level"
},
"auto_relock_timer": {
"name": "Auto-relock time"
"name": "Autorelock time"
},
"cook_time": {
"name": "Cooking time"

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.1.0"]
"requirements": ["aiomealie==1.0.1"]
}

View File

@@ -139,7 +139,7 @@ class MeteoLtWeatherEntity(CoordinatorEntity[MeteoLtUpdateCoordinator], WeatherE
forecasts_by_date[date].append(timestamp)
daily_forecasts = []
for date in sorted(forecasts_by_date.keys()):
for date in sorted(forecasts_by_date.keys())[:5]:
day_forecasts = forecasts_by_date[date]
if not day_forecasts:
continue
@@ -186,5 +186,5 @@ class MeteoLtWeatherEntity(CoordinatorEntity[MeteoLtUpdateCoordinator], WeatherE
return None
return [
self._convert_forecast_data(forecast_data)
for forecast_data in self.coordinator.data.forecast_timestamps
for forecast_data in self.coordinator.data.forecast_timestamps[:24]
]

View File

@@ -6,7 +6,7 @@ from dataclasses import dataclass
import logging
from typing import Final
from aiohttp import ClientResponseError
import aiohttp
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
@@ -153,12 +153,11 @@ class MieleButton(MieleEntity, ButtonEntity):
self._device_id,
{PROCESS_ACTION: self.entity_description.press_data},
)
except ClientResponseError as err:
_LOGGER.debug("Error setting button state for %s: %s", self.entity_id, err)
except aiohttp.ClientResponseError as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",
translation_placeholders={
"entity": self.entity_id,
},
) from err
) from ex

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass
import logging
from typing import Any, Final, cast
from aiohttp import ClientResponseError
import aiohttp
from pymiele import MieleDevice, MieleTemperature
from homeassistant.components.climate import (
@@ -250,8 +250,7 @@ class MieleClimate(MieleEntity, ClimateEntity):
cast(float, kwargs.get(ATTR_TEMPERATURE)),
self.entity_description.zone,
)
except ClientResponseError as err:
_LOGGER.debug("Error setting climate state for %s: %s", self.entity_id, err)
except aiohttp.ClientError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",

View File

@@ -73,7 +73,7 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
_LOGGER.debug(
"Error fetching actions for device %s: Status: %s, Message: %s",
device_id,
str(err.status),
err.status,
err.message,
)
actions_json = {}

View File

@@ -142,15 +142,14 @@ class MieleFan(MieleEntity, FanEntity):
await self.api.send_action(
self._device_id, {VENTILATION_STEP: ventilation_step}
)
except ClientResponseError as err:
_LOGGER.debug("Error setting fan state for %s: %s", self.entity_id, err)
except ClientResponseError as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",
translation_placeholders={
"entity": self.entity_id,
},
) from err
) from ex
self.device.state_ventilation_step = ventilation_step
self.async_write_ha_state()
@@ -172,7 +171,6 @@ class MieleFan(MieleEntity, FanEntity):
translation_key="set_state_error",
translation_placeholders={
"entity": self.entity_id,
"err_status": str(ex.status),
},
) from ex
@@ -190,7 +188,6 @@ class MieleFan(MieleEntity, FanEntity):
translation_key="set_state_error",
translation_placeholders={
"entity": self.entity_id,
"err_status": str(ex.status),
},
) from ex

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass
import logging
from typing import Any, Final
from aiohttp import ClientResponseError
import aiohttp
from homeassistant.components.light import (
ColorMode,
@@ -131,8 +131,7 @@ class MieleLight(MieleEntity, LightEntity):
await self.api.send_action(
self._device_id, {self.entity_description.light_type: mode}
)
except ClientResponseError as err:
_LOGGER.debug("Error setting light state for %s: %s", self.entity_id, err)
except aiohttp.ClientError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",

View File

@@ -4,7 +4,7 @@ from datetime import timedelta
import logging
from typing import cast
from aiohttp import ClientResponseError
import aiohttp
import voluptuous as vol
from homeassistant.const import ATTR_DEVICE_ID, ATTR_TEMPERATURE
@@ -107,7 +107,7 @@ async def set_program(call: ServiceCall) -> None:
data = {"programId": call.data[ATTR_PROGRAM_ID]}
try:
await api.set_program(serial_number, data)
except ClientResponseError as ex:
except aiohttp.ClientResponseError as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_program_error",
@@ -137,7 +137,7 @@ async def set_program_oven(call: ServiceCall) -> None:
data["temperature"] = call.data[ATTR_TEMPERATURE]
try:
await api.set_program(serial_number, data)
except ClientResponseError as ex:
except aiohttp.ClientResponseError as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_program_oven_error",
@@ -157,7 +157,7 @@ async def get_programs(call: ServiceCall) -> ServiceResponse:
try:
programs = await api.get_programs(serial_number)
except ClientResponseError as ex:
except aiohttp.ClientResponseError as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="get_programs_error",

View File

@@ -1009,7 +1009,7 @@
"cleaning_care_program": "Cleaning/care program",
"maintenance_program": "Maintenance program",
"normal_operation_mode": "Normal operation mode",
"own_program": "Program"
"own_program": "Own program"
}
},
"remaining_time": {
@@ -1089,7 +1089,7 @@
"message": "Invalid device targeted."
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
"message": "OAuth2 implementation unavailable, will retry"
},
"set_program_error": {
"message": "'Set program' action failed: {status} / {message}"

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass
import logging
from typing import Any, Final, cast
from aiohttp import ClientResponseError
import aiohttp
from pymiele import MieleDevice
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
@@ -165,8 +165,7 @@ class MieleSwitch(MieleEntity, SwitchEntity):
"""Set switch to mode."""
try:
await self.api.send_action(self._device_id, mode)
except ClientResponseError as err:
_LOGGER.debug("Error setting switch state for %s: %s", self.entity_id, err)
except aiohttp.ClientError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",
@@ -198,8 +197,7 @@ class MielePowerSwitch(MieleSwitch):
"""Set switch to mode."""
try:
await self.api.send_action(self._device_id, mode)
except ClientResponseError as err:
_LOGGER.debug("Error setting switch state for %s: %s", self.entity_id, err)
except aiohttp.ClientError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_state_error",

Some files were not shown because too many files have changed in this diff.