Mirror of https://github.com/home-assistant/core.git (synced 2025-11-11 20:10:12 +00:00)

Compare commits
100 commits, comparing copilot/ad... against dev
Commit SHA1s (100):

9760eb7f2b, 7851bed00c, 6aba0b20c6, cadfed2348, 44e2fa6996, d0ff617e17, 8e499569a4, 5e0ebddd6f, c0f61f6c2b, df60de38b0,
cb086bb8e9, ee2e9dc7d6, 85cd3c68b7, 1b0b6e63f2, 12fc79e8d3, ca2e7b9509, 8e8becc43e, dcec6c3dc8, c0e59c4508, cd379aadbf,
ccdd54b187, 3f22dbaa2e, c18dc0a9ab, f0e4296d93, b3750109c6, 93025c9845, df348644b1, 8749b0d750, a6a1519c06, 3068e19843,
55feb1e735, bb7dc69131, aa9003a524, 4e9da5249d, f502739df2, 0f2ff29378, 2921e7ed3c, 25d44e8d37, 0a480a26a3, d5da64dd8d,
92adcd8635, ee0c4b15c2, 507f54198e, 0ed342b433, 363c86faf3, 095a7ad060, ab5981bbbd, ac2fb53dfd, 02ff5de1ff, 5cd5d480d9,
a3c7d772fc, fe0c69dba7, e5365234c3, 1531175bd3, 62add59ff4, d8daca657b, 1891da46ea, 22ae894745, 160810c69d, 2ae23b920a,
a7edfb082f, 3ac203b05f, 7c3eb19fc4, 70c6fac743, e19d7250d5, a850d5dba7, 0cf0f10654, 8429f154ca, 7b4f5ad362, 583b439557,
05922de102, 7675a44b90, 1e4d645683, b5ae04605a, 2240d6b94c, d1536ee636, 8a926add7a, 31f769900a, 33ad777664, 59a4e4a337,
66a39933b0, ad395e3bba, cfc6f2c229, 63aa41c766, 037e0e93d3, db8b5865b3, bd2ccc6672, bb63d40cdf, 65285b8885, 326b8f2b4f,
9f3df52fcc, 875838c277, adaafd1fda, 50c5efddaa, c4be054161, 61186356f3, 9d60a19440, 108c212855, ae8db81c4e, 51c970d1d0
.github/workflows/ci.yaml (vendored, 2 lines changed)

@@ -622,7 +622,7 @@ jobs:
    steps:
      - *checkout
      - name: Dependency review
        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
        with:
          license-check: false # We use our own license audit checks
CODEOWNERS (generated, 4 lines changed)

@@ -1017,8 +1017,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
/tests/components/music_assistant/ @music-assistant @arturpragacz
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@@ -25,7 +25,7 @@ from .const import (
    RECOMMENDED_CHAT_MODEL,
)

PLATFORMS = (Platform.CONVERSATION,)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
homeassistant/components/anthropic/ai_task.py (80 lines)

@@ -0,0 +1,80 @@
"""AI Task integration for Anthropic."""

from __future__ import annotations

from json import JSONDecodeError
import logging

from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from .entity import AnthropicBaseLLMEntity

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up AI Task entities."""
    for subentry in config_entry.subentries.values():
        if subentry.subentry_type != "ai_task_data":
            continue

        async_add_entities(
            [AnthropicTaskEntity(config_entry, subentry)],
            config_subentry_id=subentry.subentry_id,
        )


class AnthropicTaskEntity(
    ai_task.AITaskEntity,
    AnthropicBaseLLMEntity,
):
    """Anthropic AI Task entity."""

    _attr_supported_features = (
        ai_task.AITaskEntityFeature.GENERATE_DATA
        | ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
    )

    async def _async_generate_data(
        self,
        task: ai_task.GenDataTask,
        chat_log: conversation.ChatLog,
    ) -> ai_task.GenDataTaskResult:
        """Handle a generate data task."""
        await self._async_handle_chat_log(chat_log, task.name, task.structure)

        if not isinstance(chat_log.content[-1], conversation.AssistantContent):
            raise HomeAssistantError(
                "Last content in chat log is not an AssistantContent"
            )

        text = chat_log.content[-1].content or ""

        if not task.structure:
            return ai_task.GenDataTaskResult(
                conversation_id=chat_log.conversation_id,
                data=text,
            )
        try:
            data = json_loads(text)
        except JSONDecodeError as err:
            _LOGGER.error(
                "Failed to parse JSON response: %s. Response: %s",
                err,
                text,
            )
            raise HomeAssistantError("Error with Claude structured response") from err

        return ai_task.GenDataTaskResult(
            conversation_id=chat_log.conversation_id,
            data=data,
        )
@@ -53,6 +53,7 @@ from .const import (
    CONF_WEB_SEARCH_REGION,
    CONF_WEB_SEARCH_TIMEZONE,
    CONF_WEB_SEARCH_USER_LOCATION,
    DEFAULT_AI_TASK_NAME,
    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    NON_THINKING_MODELS,
@@ -74,12 +75,16 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
    }
)

RECOMMENDED_OPTIONS = {
RECOMMENDED_CONVERSATION_OPTIONS = {
    CONF_RECOMMENDED: True,
    CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}

RECOMMENDED_AI_TASK_OPTIONS = {
    CONF_RECOMMENDED: True,
}


async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    """Validate the user input allows us to connect.
@@ -102,7 +107,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors = {}
        errors: dict[str, str] = {}

        if user_input is not None:
            self._async_abort_entries_match(user_input)
@@ -130,10 +135,16 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
                subentries=[
                    {
                        "subentry_type": "conversation",
                        "data": RECOMMENDED_OPTIONS,
                        "data": RECOMMENDED_CONVERSATION_OPTIONS,
                        "title": DEFAULT_CONVERSATION_NAME,
                        "unique_id": None,
                    }
                    },
                    {
                        "subentry_type": "ai_task_data",
                        "data": RECOMMENDED_AI_TASK_OPTIONS,
                        "title": DEFAULT_AI_TASK_NAME,
                        "unique_id": None,
                    },
                ],
            )

@@ -147,7 +158,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration."""
        return {"conversation": ConversationSubentryFlowHandler}
        return {
            "conversation": ConversationSubentryFlowHandler,
            "ai_task_data": ConversationSubentryFlowHandler,
        }


class ConversationSubentryFlowHandler(ConfigSubentryFlow):
@@ -164,7 +178,10 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Add a subentry."""
        self.options = RECOMMENDED_OPTIONS.copy()
        if self._subentry_type == "ai_task_data":
            self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
        else:
            self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
        return await self.async_step_init()

    async def async_step_reconfigure(
@@ -198,23 +215,29 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        errors: dict[str, str] = {}

        if self._is_new:
            step_schema[vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME)] = (
                str
            if self._subentry_type == "ai_task_data":
                default_name = DEFAULT_AI_TASK_NAME
            else:
                default_name = DEFAULT_CONVERSATION_NAME
            step_schema[vol.Required(CONF_NAME, default=default_name)] = str

        if self._subentry_type == "conversation":
            step_schema.update(
                {
                    vol.Optional(CONF_PROMPT): TemplateSelector(),
                    vol.Optional(
                        CONF_LLM_HASS_API,
                    ): SelectSelector(
                        SelectSelectorConfig(options=hass_apis, multiple=True)
                    ),
                }
            )

        step_schema.update(
            {
                vol.Optional(CONF_PROMPT): TemplateSelector(),
                vol.Optional(
                    CONF_LLM_HASS_API,
                ): SelectSelector(
                    SelectSelectorConfig(options=hass_apis, multiple=True)
                ),
                vol.Required(
                    CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
                ): bool,
            }
        )
        step_schema[
            vol.Required(
                CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
            )
        ] = bool

        if user_input is not None:
            if not user_input.get(CONF_LLM_HASS_API):
@@ -298,10 +321,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        if not model.startswith(tuple(NON_THINKING_MODELS)):
            step_schema[
                vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
            ] = NumberSelector(
                NumberSelectorConfig(
                    min=0, max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS)
                )
            ] = vol.All(
                NumberSelector(
                    NumberSelectorConfig(
                        min=0,
                        max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    )
                ),
                vol.Coerce(int),
            )
        else:
            self.options.pop(CONF_THINKING_BUDGET, None)
@@ -6,6 +6,7 @@ DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)

DEFAULT_CONVERSATION_NAME = "Claude conversation"
DEFAULT_AI_TASK_NAME = "Claude AI Task"

CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
@@ -1,17 +1,24 @@
"""Base entity for Anthropic."""

import base64
from collections.abc import AsyncGenerator, Callable, Iterable
from dataclasses import dataclass, field
import json
from mimetypes import guess_file_type
from pathlib import Path
from typing import Any

import anthropic
from anthropic import AsyncStream
from anthropic.types import (
    Base64ImageSourceParam,
    Base64PDFSourceParam,
    CitationsDelta,
    CitationsWebSearchResultLocation,
    CitationWebSearchResultLocationParam,
    ContentBlockParam,
    DocumentBlockParam,
    ImageBlockParam,
    InputJSONDelta,
    MessageDeltaUsage,
    MessageParam,
@@ -37,6 +44,9 @@ from anthropic.types import (
    ThinkingConfigDisabledParam,
    ThinkingConfigEnabledParam,
    ThinkingDelta,
    ToolChoiceAnyParam,
    ToolChoiceAutoParam,
    ToolChoiceToolParam,
    ToolParam,
    ToolResultBlockParam,
    ToolUnionParam,
@@ -50,13 +60,16 @@ from anthropic.types import (
    WebSearchToolResultError,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
import voluptuous as vol
from voluptuous_openapi import convert

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify

from . import AnthropicConfigEntry
from .const import (
@@ -321,6 +334,7 @@ def _convert_content(
async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
    chat_log: conversation.ChatLog,
    stream: AsyncStream[MessageStreamEvent],
    output_tool: str | None = None,
) -> AsyncGenerator[
    conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
@@ -381,6 +395,16 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                input="",
            )
            current_tool_args = ""
            if response.content_block.name == output_tool:
                if first_block or content_details.has_content():
                    if content_details.has_citations():
                        content_details.delete_empty()
                        yield {"native": content_details}
                    content_details = ContentDetails()
                    content_details.add_citation_detail()
                    yield {"role": "assistant"}
                    has_native = False
                first_block = False
        elif isinstance(response.content_block, TextBlock):
            if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
                first_block
@@ -471,7 +495,16 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
            first_block = True
        elif isinstance(response, RawContentBlockDeltaEvent):
            if isinstance(response.delta, InputJSONDelta):
                current_tool_args += response.delta.partial_json
                if (
                    current_tool_block is not None
                    and current_tool_block["name"] == output_tool
                ):
                    content_details.citation_details[-1].length += len(
                        response.delta.partial_json
                    )
                    yield {"content": response.delta.partial_json}
                else:
                    current_tool_args += response.delta.partial_json
            elif isinstance(response.delta, TextDelta):
                content_details.citation_details[-1].length += len(response.delta.text)
                yield {"content": response.delta.text}
@@ -490,6 +523,9 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                content_details.add_citation(response.delta.citation)
        elif isinstance(response, RawContentBlockStopEvent):
            if current_tool_block is not None:
                if current_tool_block["name"] == output_tool:
                    current_tool_block = None
                    continue
                tool_args = json.loads(current_tool_args) if current_tool_args else {}
                current_tool_block["input"] = tool_args
                yield {
@@ -557,6 +593,8 @@ class AnthropicBaseLLMEntity(Entity):
    async def _async_handle_chat_log(
        self,
        chat_log: conversation.ChatLog,
        structure_name: str | None = None,
        structure: vol.Schema | None = None,
    ) -> None:
        """Generate an answer for the chat log."""
        options = self.subentry.data
@@ -613,6 +651,74 @@ class AnthropicBaseLLMEntity(Entity):
            }
            tools.append(web_search)

        # Handle attachments by adding them to the last user message
        last_content = chat_log.content[-1]
        if last_content.role == "user" and last_content.attachments:
            last_message = messages[-1]
            if last_message["role"] != "user":
                raise HomeAssistantError(
                    "Last message must be a user message to add attachments"
                )
            if isinstance(last_message["content"], str):
                last_message["content"] = [
                    TextBlockParam(type="text", text=last_message["content"])
                ]
            last_message["content"].extend(  # type: ignore[union-attr]
                await async_prepare_files_for_prompt(
                    self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
                )
            )

        if structure and structure_name:
            structure_name = slugify(structure_name)
            if model_args["thinking"]["type"] == "disabled":
                if not tools:
                    # Simplest case: no tools and no extended thinking
                    # Add a tool and force its use
                    model_args["tool_choice"] = ToolChoiceToolParam(
                        type="tool",
                        name=structure_name,
                    )
                else:
                    # Second case: tools present but no extended thinking
                    # Allow the model to use any tool but not text response
                    # The model should know to use the right tool by its description
                    model_args["tool_choice"] = ToolChoiceAnyParam(
                        type="any",
                    )
            else:
                # Extended thinking is enabled. With extended thinking, we cannot
                # force tool use or disable text responses, so we add a hint to the
                # system prompt instead. With extended thinking, the model should be
                # smart enough to use the tool.
                model_args["tool_choice"] = ToolChoiceAutoParam(
                    type="auto",
                )

                if isinstance(model_args["system"], str):
                    model_args["system"] = [
                        TextBlockParam(type="text", text=model_args["system"])
                    ]
                model_args["system"].append(  # type: ignore[union-attr]
                    TextBlockParam(
                        type="text",
                        text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
                    )
                )

            tools.append(
                ToolParam(
                    name=structure_name,
                    description="Use this tool to reply to the user",
                    input_schema=convert(
                        structure,
                        custom_serializer=chat_log.llm_api.custom_serializer
                        if chat_log.llm_api
                        else llm.selector_serializer,
                    ),
                )
            )

        if tools:
            model_args["tools"] = tools

@@ -629,7 +735,11 @@ class AnthropicBaseLLMEntity(Entity):
                    content
                    async for content in chat_log.async_add_delta_content_stream(
                        self.entity_id,
                        _transform_stream(chat_log, stream),
                        _transform_stream(
                            chat_log,
                            stream,
                            output_tool=structure_name if structure else None,
                        ),
                    )
                ]
            )
@@ -641,3 +751,59 @@ class AnthropicBaseLLMEntity(Entity):

            if not chat_log.unresponded_tool_results:
                break


async def async_prepare_files_for_prompt(
    hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
    """Append files to a prompt.

    Caller needs to ensure that the files are allowed.
    """

    def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
        content: list[ImageBlockParam | DocumentBlockParam] = []

        for file_path, mime_type in files:
            if not file_path.exists():
                raise HomeAssistantError(f"`{file_path}` does not exist")

            if mime_type is None:
                mime_type = guess_file_type(file_path)[0]

            if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
                raise HomeAssistantError(
                    "Only images and PDF are supported by the Anthropic API,"
                    f"`{file_path}` is not an image file or PDF"
                )
            if mime_type == "image/jpg":
                mime_type = "image/jpeg"

            base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")

            if mime_type.startswith("image/"):
                content.append(
                    ImageBlockParam(
                        type="image",
                        source=Base64ImageSourceParam(
                            type="base64",
                            media_type=mime_type,  # type: ignore[typeddict-item]
                            data=base64_file,
                        ),
                    )
                )
            elif mime_type.startswith("application/pdf"):
                content.append(
                    DocumentBlockParam(
                        type="document",
                        source=Base64PDFSourceParam(
                            type="base64",
                            media_type=mime_type,  # type: ignore[typeddict-item]
                            data=base64_file,
                        ),
                    )
                )

        return content

    return await hass.async_add_executor_job(append_files_to_content)
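Not part of the diff above, but for context: a minimal standalone sketch of the forced-tool technique the entity code relies on for structured output, written directly against the Anthropic Python SDK. The model name, tool name, schema, and prompt below are illustrative assumptions, not values taken from the integration.

```python
# Sketch of the structured-output technique used above: when extended thinking
# is off and no other tools are registered, a single tool is added and
# tool_choice forces the model to call it, so the tool-call arguments become
# the structured JSON answer. Model name, schema, and prompt are assumptions.
import anthropic

client = anthropic.Anthropic()

answer_schema = {
    "type": "object",
    "properties": {
        "city": {"type": "string"},
        "temperature_c": {"type": "number"},
    },
    "required": ["city", "temperature_c"],
}

response = client.messages.create(
    model="claude-sonnet-4-5",  # assumed model alias
    max_tokens=512,
    tools=[
        {
            "name": "answer",
            "description": "Use this tool to reply to the user",
            "input_schema": answer_schema,
        }
    ],
    tool_choice={"type": "tool", "name": "answer"},  # force the tool call
    messages=[{"role": "user", "content": "Report the weather in Berlin as data."}],
)

# The structured data arrives as the tool-use input rather than as plain text.
tool_use = next(block for block in response.content if block.type == "tool_use")
print(tool_use.input)  # e.g. {"city": "Berlin", "temperature_c": 14.0}
```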
@@ -18,6 +18,49 @@
      }
    },
    "config_subentries": {
      "ai_task_data": {
        "abort": {
          "entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
          "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
        },
        "entry_type": "AI task",
        "initiate_flow": {
          "reconfigure": "Reconfigure AI task",
          "user": "Add AI task"
        },
        "step": {
          "advanced": {
            "data": {
              "chat_model": "[%key:common::generic::model%]",
              "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
              "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
          },
          "init": {
            "data": {
              "name": "[%key:common::config_flow::data::name%]",
              "recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
          },
          "model": {
            "data": {
              "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
              "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
              "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
              "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
            },
            "data_description": {
              "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
              "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
              "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
              "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
          }
        }
      },
      "conversation": {
        "abort": {
          "entry_not_loaded": "Cannot add things while the configuration is disabled.",
@@ -46,7 +89,8 @@
        },
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template."
        }
      },
      "title": "Basic settings"
    },
    "model": {
      "data": {
@@ -37,13 +37,6 @@ USER_SCHEMA = vol.Schema(
    }
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
    }
)


async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -175,36 +168,55 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle reconfiguration of the device."""
        reconfigure_entry = self._get_reconfigure_entry()
        if not user_input:
            return self.async_show_form(
                step_id="reconfigure", data_schema=STEP_RECONFIGURE
            )

        updated_host = user_input[CONF_HOST]

        self._async_abort_entries_match({CONF_HOST: updated_host})

        errors: dict[str, str] = {}

        try:
            await validate_input(self.hass, user_input)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except InvalidPin:
            errors["base"] = "invalid_pin"
        except Exception:  # noqa: BLE001
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            return self.async_update_reload_and_abort(
                reconfigure_entry, data_updates={CONF_HOST: updated_host}
            )
        if user_input is not None:
            updated_host = user_input[CONF_HOST]

            self._async_abort_entries_match({CONF_HOST: updated_host})

            try:
                data_to_validate = {
                    CONF_HOST: updated_host,
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_PIN: user_input[CONF_PIN],
                    CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
                }
                await validate_input(self.hass, data_to_validate)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except InvalidPin:
                errors["base"] = "invalid_pin"
            except Exception:  # noqa: BLE001
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                data_updates = {
                    CONF_HOST: updated_host,
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_PIN: user_input[CONF_PIN],
                }
                return self.async_update_reload_and_abort(
                    reconfigure_entry, data_updates=data_updates
                )

        schema = vol.Schema(
            {
                vol.Required(
                    CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
                ): cv.string,
                vol.Required(
                    CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
                ): cv.port,
                vol.Optional(CONF_PIN): cv.string,
            }
        )

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=STEP_RECONFIGURE,
            data_schema=schema,
            errors=errors,
        )
@@ -4,7 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
import logging
from typing import Literal
from typing import Any, Literal

from hassil.recognize import RecognizeResult
import voluptuous as vol
@@ -21,6 +21,7 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass

@@ -52,6 +53,8 @@ from .const import (
    DATA_COMPONENT,
    DOMAIN,
    HOME_ASSISTANT_AGENT,
    METADATA_CUSTOM_FILE,
    METADATA_CUSTOM_SENTENCE,
    SERVICE_PROCESS,
    SERVICE_RELOAD,
    ConversationEntityFeature,
@@ -266,10 +269,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component

    agent_config = config.get(DOMAIN, {})
    await async_setup_default_agent(
        hass, entity_component, config_intents=agent_config.get("intents", {})
    )
    manager = get_agent_manager(hass)

    hass_config_path = hass.config.path()
    config_intents = _get_config_intents(config, hass_config_path)
    manager.update_config_intents(config_intents)

    await async_setup_default_agent(hass, entity_component)

    async def handle_process(service: ServiceCall) -> ServiceResponse:
        """Parse text into commands."""
@@ -294,9 +300,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    async def handle_reload(service: ServiceCall) -> None:
        """Reload intents."""
        agent = get_agent_manager(hass).default_agent
        language = service.data.get(ATTR_LANGUAGE)
        if language is None:
            conf = await async_integration_yaml_config(hass, DOMAIN)
            if conf is not None:
                config_intents = _get_config_intents(conf, hass_config_path)
                manager.update_config_intents(config_intents)

        agent = manager.default_agent
        if agent is not None:
            await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
            await agent.async_reload(language=language)

    hass.services.async_register(
        DOMAIN,
@@ -313,6 +326,27 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    return True


def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
    """Return config intents."""
    intents = config.get(DOMAIN, {}).get("intents", {})
    return {
        "intents": {
            intent_name: {
                "data": [
                    {
                        "sentences": sentences,
                        "metadata": {
                            METADATA_CUSTOM_SENTENCE: True,
                            METADATA_CUSTOM_FILE: hass_config_path,
                        },
                    }
                ]
            }
            for intent_name, sentences in intents.items()
        }
    }


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
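As a reading aid for the hunk above, a minimal sketch of the dictionary shape `_get_config_intents` builds from a `conversation:` config. The intent name and sentences are invented for illustration; only the structure and metadata keys mirror the code.

```python
# Sketch of _get_config_intents input/output; intent name and sentences are
# invented examples, the metadata keys come from conversation/const.py.
config = {
    "conversation": {
        "intents": {
            "TellJoke": ["Tell me a joke", "Make me laugh"],
        }
    }
}

# _get_config_intents(config, "/config") would return roughly:
expected = {
    "intents": {
        "TellJoke": {
            "data": [
                {
                    "sentences": ["Tell me a joke", "Make me laugh"],
                    "metadata": {
                        "hass_custom_sentence": True,   # METADATA_CUSTOM_SENTENCE
                        "hass_custom_file": "/config",  # METADATA_CUSTOM_FILE
                    },
                }
            ]
        }
    }
}
```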
@@ -147,6 +147,7 @@ class AgentManager:
        self.hass = hass
        self._agents: dict[str, AbstractConversationAgent] = {}
        self.default_agent: DefaultAgent | None = None
        self.config_intents: dict[str, Any] = {}
        self.triggers_details: list[TriggerDetails] = []

    @callback
@@ -199,9 +200,16 @@ class AgentManager:

    async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
        """Set up the default agent."""
        agent.update_config_intents(self.config_intents)
        agent.update_triggers(self.triggers_details)
        self.default_agent = agent

    def update_config_intents(self, intents: dict[str, Any]) -> None:
        """Update config intents."""
        self.config_intents = intents
        if self.default_agent is not None:
            self.default_agent.update_config_intents(intents)

    def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
        """Register a trigger."""
        self.triggers_details.append(trigger_details)
@@ -30,3 +30,7 @@ class ConversationEntityFeature(IntFlag):
    """Supported features of the conversation entity."""

    CONTROL = 1


METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
@@ -77,7 +77,12 @@ from homeassistant.util.json import JsonObjectType, json_loads_object

from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import DOMAIN, ConversationEntityFeature
from .const import (
    DOMAIN,
    METADATA_CUSTOM_FILE,
    METADATA_CUSTOM_SENTENCE,
    ConversationEntityFeature,
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -91,8 +96,6 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]

_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}

METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"

ERROR_SENTINEL = object()
@@ -202,10 +205,9 @@ class IntentCache:
async def async_setup_default_agent(
    hass: HomeAssistant,
    entity_component: EntityComponent[ConversationEntity],
    config_intents: dict[str, Any],
) -> None:
    """Set up entity registry listener for the default agent."""
    agent = DefaultAgent(hass, config_intents)
    agent = DefaultAgent(hass)
    await entity_component.async_add_entities([agent])
    await get_agent_manager(hass).async_setup_default_agent(agent)

@@ -230,14 +232,14 @@ class DefaultAgent(ConversationEntity):
    _attr_name = "Home Assistant"
    _attr_supported_features = ConversationEntityFeature.CONTROL

    def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the default agent."""
        self.hass = hass
        self._lang_intents: dict[str, LanguageIntents | object] = {}
        self._load_intents_lock = asyncio.Lock()

        # intent -> [sentences]
        self._config_intents: dict[str, Any] = config_intents
        # Intents from common conversation config
        self._config_intents: dict[str, Any] = {}

        # Sentences that will trigger a callback (skipping intent recognition)
        self._triggers_details: list[TriggerDetails] = []
@@ -1035,6 +1037,14 @@ class DefaultAgent(ConversationEntity):
        # Intents have changed, so we must clear the cache
        self._intent_cache.clear()

    @callback
    def update_config_intents(self, intents: dict[str, Any]) -> None:
        """Update config intents."""
        self._config_intents = intents

        # Intents have changed, so we must clear the cache
        self._intent_cache.clear()

    async def async_prepare(self, language: str | None = None) -> None:
        """Load intents for a language."""
        if language is None:
@@ -1159,33 +1169,10 @@ class DefaultAgent(ConversationEntity):
                custom_sentences_path,
            )

        # Load sentences from HA config for default language only
        if self._config_intents and (
            self.hass.config.language in (language, language_variant)
        ):
            hass_config_path = self.hass.config.path()
            merge_dict(
                intents_dict,
                {
                    "intents": {
                        intent_name: {
                            "data": [
                                {
                                    "sentences": sentences,
                                    "metadata": {
                                        METADATA_CUSTOM_SENTENCE: True,
                                        METADATA_CUSTOM_FILE: hass_config_path,
                                    },
                                }
                            ]
                        }
                        for intent_name, sentences in self._config_intents.items()
                    }
                },
            )
            _LOGGER.debug(
                "Loaded intents from configuration.yaml",
            )
            merge_dict(
                intents_dict,
                self._config_intents,
            )

        if not intents_dict:
            return None
@@ -116,20 +116,28 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
        is_open_wdr = None
        is_open_hdr = None
        reserve3 = product_info.get("reserve4")
        reserve3_int = int(reserve3) if reserve3 is not None else 0
        supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
        supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
        if supports_wdr_adjustment_val:
            ret_wdr, is_open_wdr_data = self.session.getWdrMode()
            mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
            is_open_wdr = bool(int(mode))
        elif supports_hdr_adjustment_val:
            ret_hdr, is_open_hdr_data = self.session.getHdrMode()
            mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
            is_open_hdr = bool(int(mode))

        model = product_info.get("model")
        model_int = int(model) if model is not None else 7002
        if model_int > 7001:
            reserve3_int = int(reserve3) if reserve3 is not None else 0
            supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
            supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
            if supports_wdr_adjustment_val:
                ret_wdr, is_open_wdr_data = self.session.getWdrMode()
                mode = (
                    is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
                )
                is_open_wdr = bool(int(mode))
            elif supports_hdr_adjustment_val:
                ret_hdr, is_open_hdr_data = self.session.getHdrMode()
                mode = (
                    is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
                )
                is_open_hdr = bool(int(mode))
        else:
            supports_wdr_adjustment_val = False
            supports_hdr_adjustment_val = False
        ret_sw, software_capabilities = self.session.getSWCapabilities()

        supports_speak_volume_adjustment_val = (
            bool(int(software_capabilities.get("swCapabilities1")) & 32)
            if ret_sw == 0
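A minimal sketch of the capability decoding used in this hunk: `reserve3` is read as a bit field, with bit value 256 treated as WDR-adjustment support and 128 as HDR-adjustment support. The sample value below is invented for illustration.

```python
# reserve3 as a bit field: bit 256 -> WDR adjustable, bit 128 -> HDR adjustable
# (per the diff above). The example value is made up.
reserve3_int = 384  # 256 | 128 -> both capabilities reported

supports_wdr = bool(reserve3_int & 256)  # True
supports_hdr = bool(reserve3_int & 128)  # True

# A camera reporting 128 only would support HDR adjustment but not WDR.
```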
@@ -1,77 +0,0 @@
"""Support for the Hive alarm."""

from __future__ import annotations

from datetime import timedelta

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import HiveConfigEntry
from .entity import HiveEntity

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=15)
HIVETOHA = {
    "home": AlarmControlPanelState.DISARMED,
    "asleep": AlarmControlPanelState.ARMED_NIGHT,
    "away": AlarmControlPanelState.ARMED_AWAY,
    "sos": AlarmControlPanelState.TRIGGERED,
}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HiveConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Hive thermostat based on a config entry."""

    hive = entry.runtime_data
    if devices := hive.session.deviceList.get("alarm_control_panel"):
        async_add_entities(
            [HiveAlarmControlPanelEntity(hive, dev) for dev in devices], True
        )


class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
    """Representation of a Hive alarm."""

    _attr_supported_features = (
        AlarmControlPanelEntityFeature.ARM_NIGHT
        | AlarmControlPanelEntityFeature.ARM_AWAY
        | AlarmControlPanelEntityFeature.TRIGGER
    )
    _attr_code_arm_required = False

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Send disarm command."""
        await self.hive.alarm.setMode(self.device, "home")

    async def async_alarm_arm_night(self, code: str | None = None) -> None:
        """Send arm night command."""
        await self.hive.alarm.setMode(self.device, "asleep")

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Send arm away command."""
        await self.hive.alarm.setMode(self.device, "away")

    async def async_alarm_trigger(self, code: str | None = None) -> None:
        """Send alarm trigger command."""
        await self.hive.alarm.setMode(self.device, "sos")

    async def async_update(self) -> None:
        """Update all Node data from Hive."""
        await self.hive.session.updateData(self.device)
        self.device = await self.hive.alarm.getAlarm(self.device)
        self._attr_available = self.device["deviceData"].get("online")
        if self._attr_available:
            if self.device["status"]["state"]:
                self._attr_alarm_state = AlarmControlPanelState.TRIGGERED
            else:
                self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]]
@@ -11,7 +11,6 @@ CONFIG_ENTRY_VERSION = 1
DEFAULT_NAME = "Hive"
DOMAIN = "hive"
PLATFORMS = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,
    Platform.CLIMATE,
    Platform.LIGHT,
@@ -20,7 +19,6 @@ PLATFORMS = [
    Platform.WATER_HEATER,
]
PLATFORM_LOOKUP = {
    Platform.ALARM_CONTROL_PANEL: "alarm_control_panel",
    Platform.BINARY_SENSOR: "binary_sensor",
    Platform.CLIMATE: "climate",
    Platform.LIGHT: "light",
@@ -9,5 +9,5 @@
  },
  "iot_class": "cloud_polling",
  "loggers": ["apyhiveapi"],
  "requirements": ["pyhive-integration==1.0.6"]
  "requirements": ["pyhive-integration==1.0.7"]
}
@@ -1237,7 +1237,7 @@
      "message": "Error obtaining data from the API: {error}"
    },
    "oauth2_implementation_unavailable": {
      "message": "OAuth2 implementation temporarily unavailable, will retry"
      "message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
    },
    "pause_program": {
      "message": "Error pausing program: {error}"
@@ -4,6 +4,7 @@ import logging
from typing import TYPE_CHECKING

from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
@@ -16,7 +17,6 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
from .util import async_connect_hub

PARALLEL_UPDATES = 1
@@ -8,6 +8,7 @@ import logging

from aiopvapi.helpers.aiorequest import PvApiMaintenance
from aiopvapi.hub import Hub
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.shades import Shades

from homeassistant.config_entries import ConfigEntry
@@ -15,7 +16,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import HUB_EXCEPTIONS
from .shade_data import PowerviewShadeData

_LOGGER = logging.getLogger(__name__)
@@ -208,13 +208,13 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity):
    async def _async_execute_move(self, move: ShadePosition) -> None:
        """Execute a move that can affect multiple positions."""
        _LOGGER.debug("Move request %s: %s", self.name, move)
        # Store the requested positions so subsequent move
        # requests contain the secondary shade positions
        self.data.update_shade_position(self._shade.id, move)
        async with self.coordinator.radio_operation_lock:
            response = await self._shade.move(move)
        _LOGGER.debug("Move response %s: %s", self.name, response)

        # Process the response from the hub (including new positions)
        self.data.update_shade_position(self._shade.id, response)

    async def _async_set_cover_position(self, target_hass_position: int) -> None:
        """Move the shade to a position."""
        target_hass_position = self._clamp_cover_limit(target_hass_position)
@@ -3,6 +3,7 @@
import logging

from aiopvapi.resources.shade import BaseShade, ShadePosition
from aiopvapi.resources.shade_data import PowerviewShadeData

from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
@@ -11,7 +12,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewDeviceInfo
from .shade_data import PowerviewShadeData

_LOGGER = logging.getLogger(__name__)
@@ -18,6 +18,6 @@
  },
  "iot_class": "local_polling",
  "loggers": ["aiopvapi"],
  "requirements": ["aiopvapi==3.2.1"],
  "requirements": ["aiopvapi==3.3.0"],
  "zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."]
}
@@ -1,80 +0,0 @@
"""Shade data for the Hunter Douglas PowerView integration."""

from __future__ import annotations

from dataclasses import fields
from typing import Any

from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade import BaseShade, ShadePosition

from .util import async_map_data_by_id

POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]


def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition:
    """Copy position data from source to target for None values only."""
    for field in POSITION_FIELDS:
        if (value := getattr(source, field.name)) is not None:
            setattr(target, field.name, value)


class PowerviewShadeData:
    """Coordinate shade data between multiple api calls."""

    def __init__(self) -> None:
        """Init the shade data."""
        self._raw_data_by_id: dict[int, dict[str | int, Any]] = {}
        self._shade_group_data_by_id: dict[int, BaseShade] = {}
        self.positions: dict[int, ShadePosition] = {}

    def get_raw_data(self, shade_id: int) -> dict[str | int, Any]:
        """Get data for the shade."""
        return self._raw_data_by_id[shade_id]

    def get_all_raw_data(self) -> dict[int, dict[str | int, Any]]:
        """Get data for all shades."""
        return self._raw_data_by_id

    def get_shade(self, shade_id: int) -> BaseShade:
        """Get specific shade from the coordinator."""
        return self._shade_group_data_by_id[shade_id]

    def get_shade_position(self, shade_id: int) -> ShadePosition:
        """Get positions for a shade."""
        if shade_id not in self.positions:
            shade_position = ShadePosition()
            # If we have the group data, use it to populate the initial position
            if shade := self._shade_group_data_by_id.get(shade_id):
                copy_position_data(shade.current_position, shade_position)
            self.positions[shade_id] = shade_position
        return self.positions[shade_id]

    def update_from_group_data(self, shade_id: int) -> None:
        """Process an update from the group data."""
        data = self._shade_group_data_by_id[shade_id]
        copy_position_data(data.current_position, self.get_shade_position(data.id))

    def store_group_data(self, shade_data: PowerviewData) -> None:
        """Store data from the all shades endpoint.

        This does not update the shades or positions (self.positions)
        as the data may be stale. update_from_group_data
        with a shade_id will update a specific shade
        from the group data.
        """
        self._shade_group_data_by_id = shade_data.processed
        self._raw_data_by_id = async_map_data_by_id(shade_data.raw)

    def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
        """Update a single shades position."""
        copy_position_data(new_position, self.get_shade_position(shade_id))

    def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
        """Update a single shades velocity."""
        # the hub will always return a velocity of 0 on initial connect,
        # separate definition to store consistent value in HA
        # this value is purely driven from HA
        if shade_data.velocity is not None:
            self.get_shade_position(shade_id).velocity = shade_data.velocity
@@ -2,25 +2,15 @@

from __future__ import annotations

from collections.abc import Iterable
from typing import Any

from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.hub import Hub

from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .model import PowerviewAPI, PowerviewDeviceInfo


@callback
def async_map_data_by_id(data: Iterable[dict[str | int, Any]]):
    """Return a dict with the key being the id for a list of entries."""
    return {entry[ATTR_ID]: entry for entry in data}


async def async_connect_hub(
    hass: HomeAssistant, address: str, api_version: int | None = None
) -> PowerviewAPI:
@@ -13,6 +13,7 @@ from typing import Any
from aiohttp import web
from hyperion import client
from hyperion.const import (
    KEY_DATA,
    KEY_IMAGE,
    KEY_IMAGE_STREAM,
    KEY_LEDCOLORS,
@@ -155,7 +156,8 @@ class HyperionCamera(Camera):
            """Update Hyperion components."""
            if not img:
                return
            img_data = img.get(KEY_RESULT, {}).get(KEY_IMAGE)
            # Prefer KEY_DATA (Hyperion server >= 2.1.1); fall back to KEY_RESULT for older server versions
            img_data = img.get(KEY_DATA, img.get(KEY_RESULT, {})).get(KEY_IMAGE)
            if not img_data or not img_data.startswith(IMAGE_STREAM_JPG_SENTINEL):
                return
            async with self._image_cond:
@@ -5,7 +5,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass

from propcache.api import cached_property
from pyituran import Vehicle

from homeassistant.components.binary_sensor import (
@@ -69,7 +68,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
        super().__init__(coordinator, license_plate, description.key)
        self.entity_description = description

    @cached_property
    @property
    def is_on(self) -> bool:
        """Return true if the binary sensor is on."""
        return self.entity_description.value_fn(self.vehicle)

@@ -2,8 +2,6 @@

from __future__ import annotations

from propcache.api import cached_property

from homeassistant.components.device_tracker import TrackerEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -40,12 +38,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
        """Initialize the device tracker."""
        super().__init__(coordinator, license_plate, "device_tracker")

    @cached_property
    @property
    def latitude(self) -> float | None:
        """Return latitude value of the device."""
        return self.vehicle.gps_coordinates[0]

    @cached_property
    @property
    def longitude(self) -> float | None:
        """Return longitude value of the device."""
        return self.vehicle.gps_coordinates[1]

@@ -6,7 +6,6 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from propcache.api import cached_property
from pyituran import Vehicle

from homeassistant.components.sensor import (
@@ -133,7 +132,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
        super().__init__(coordinator, license_plate, description.key)
        self.entity_description = description

    @cached_property
    @property
    def native_value(self) -> StateType | datetime:
        """Return the state of the device."""
        return self.entity_description.value_fn(self.vehicle)
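The repeated `@cached_property` → `@property` change above matters because the coordinator replaces the underlying vehicle data between updates; a cached property keeps serving the first value it computed. A small self-contained illustration (the class and attribute names are invented, and `functools.cached_property` stands in for `propcache.api.cached_property`, which caches the same way for this purpose):

```python
# Toy demonstration of why a cached property goes stale when the backing
# data is refreshed. Names are invented; functools.cached_property is used
# here as a stand-in for propcache's cached_property.
from functools import cached_property


class Tracker:
    def __init__(self) -> None:
        self.position = (52.0, 13.0)

    @cached_property
    def cached_latitude(self) -> float:
        return self.position[0]

    @property
    def latitude(self) -> float:
        return self.position[0]


t = Tracker()
print(t.cached_latitude, t.latitude)  # 52.0 52.0
t.position = (48.0, 11.0)             # simulated coordinator update
print(t.cached_latitude, t.latitude)  # 52.0 48.0 -> the cached value is stale
```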
@@ -94,28 +94,6 @@
      }
    },
    "services": {
      "address_to_device_id": {
        "description": "Converts an LCN address into a device ID.",
        "fields": {
          "host": {
            "description": "Host name as given in the integration panel.",
            "name": "Host name"
          },
          "id": {
            "description": "Module or group number of the target.",
            "name": "Module or group ID"
          },
          "segment_id": {
            "description": "Segment number of the target.",
            "name": "Segment ID"
          },
          "type": {
            "description": "Module type of the target.",
            "name": "Type"
          }
        },
        "name": "Address to device ID"
      },
      "dyn_text": {
        "description": "Sends dynamic text to LCN-GTxD displays.",
        "fields": {
@@ -353,17 +353,13 @@ DISCOVERY_SCHEMAS = [
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            # DeviceFault or SupplyFault bit enabled
            device_to_ha={
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedHigh: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kLocalOverride: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemotePressure: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteFlow: False,
                clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteTemperature: False,
            }.get,
            device_to_ha=lambda x: bool(
                x
                & (
                    clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault
                    | clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault
                )
            ),
        ),
        entity_class=MatterBinarySensor,
        required_attributes=(
@@ -377,9 +373,9 @@ DISCOVERY_SCHEMAS = [
            key="PumpStatusRunning",
            translation_key="pump_running",
            device_class=BinarySensorDeviceClass.RUNNING,
            device_to_ha=lambda x: (
            device_to_ha=lambda x: bool(
                x
                == clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
                & clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -395,8 +391,8 @@ DISCOVERY_SCHEMAS = [
            translation_key="dishwasher_alarm_inflow",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
                x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
            device_to_ha=lambda x: bool(
                x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -410,8 +406,8 @@ DISCOVERY_SCHEMAS = [
            translation_key="alarm_door",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
                x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
            device_to_ha=lambda x: bool(
                x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -425,9 +421,10 @@ DISCOVERY_SCHEMAS = [
            translation_key="valve_fault_general_fault",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
            # GeneralFault bit from ValveFault attribute
            device_to_ha=lambda x: bool(
                x
                == clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
                & clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -443,9 +440,10 @@ DISCOVERY_SCHEMAS = [
            translation_key="valve_fault_blocked",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
            # Blocked bit from ValveFault attribute
            device_to_ha=lambda x: bool(
                x
                == clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
                & clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -461,9 +459,10 @@ DISCOVERY_SCHEMAS = [
            translation_key="valve_fault_leaking",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
            # Leaking bit from ValveFault attribute
            device_to_ha=lambda x: bool(
                x
                == clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
                & clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -478,8 +477,8 @@ DISCOVERY_SCHEMAS = [
            translation_key="alarm_door",
            device_class=BinarySensorDeviceClass.PROBLEM,
            entity_category=EntityCategory.DIAGNOSTIC,
            device_to_ha=lambda x: (
                x == clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
            device_to_ha=lambda x: bool(
                x & clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
            ),
        ),
        entity_class=MatterBinarySensor,
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.0.1"]
"requirements": ["aiomealie==1.1.0"]
}

@@ -1009,7 +1009,7 @@
"cleaning_care_program": "Cleaning/care program",
"maintenance_program": "Maintenance program",
"normal_operation_mode": "Normal operation mode",
"own_program": "Own program"
"own_program": "Program"
}
},
"remaining_time": {
@@ -1089,7 +1089,7 @@
"message": "Invalid device targeted."
},
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation unavailable, will retry"
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"set_program_error": {
"message": "'Set program' action failed: {status} / {message}"

@@ -13,7 +13,7 @@ from music_assistant_client.exceptions import (
from music_assistant_models.api import ServerInfoMessage
import voluptuous as vol

from homeassistant.config_entries import SOURCE_IGNORE, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
@@ -21,21 +21,14 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN, LOGGER

DEFAULT_URL = "http://mass.local:8095"
DEFAULT_TITLE = "Music Assistant"
DEFAULT_URL = "http://mass.local:8095"

def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema:
"""Return a schema for the manual step."""
default_url = user_input.get(CONF_URL, DEFAULT_URL)
return vol.Schema(
{
vol.Required(CONF_URL, default=default_url): str,
}
)
STEP_USER_SCHEMA = vol.Schema({vol.Required(CONF_URL): str})

async def get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage:
async def _get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage:
"""Validate the user input allows us to connect."""
async with MusicAssistantClient(
url, aiohttp_client.async_get_clientsession(hass)
@@ -52,25 +45,17 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):

def __init__(self) -> None:
"""Set up flow instance."""
self.server_info: ServerInfoMessage | None = None
self.url: str | None = None

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a manual configuration."""
errors: dict[str, str] = {}

if user_input is not None:
try:
self.server_info = await get_server_info(
self.hass, user_input[CONF_URL]
)
await self.async_set_unique_id(
self.server_info.server_id, raise_on_progress=False
)
self._abort_if_unique_id_configured(
updates={CONF_URL: user_input[CONF_URL]},
reload_on_update=True,
)
server_info = await _get_server_info(self.hass, user_input[CONF_URL])
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidServerVersion:
@@ -79,68 +64,49 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=DEFAULT_TITLE,
data={
CONF_URL: user_input[CONF_URL],
},
await self.async_set_unique_id(
server_info.server_id, raise_on_progress=False
)
self._abort_if_unique_id_configured(
updates={CONF_URL: user_input[CONF_URL]}
)

return self.async_show_form(
step_id="user", data_schema=get_manual_schema(user_input), errors=errors
)
return self.async_create_entry(
title=DEFAULT_TITLE,
data={CONF_URL: user_input[CONF_URL]},
)

return self.async_show_form(step_id="user", data_schema=get_manual_schema({}))
suggested_values = user_input
if suggested_values is None:
suggested_values = {CONF_URL: DEFAULT_URL}

return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_SCHEMA, suggested_values
),
errors=errors,
)

async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle a discovered Mass server.

This flow is triggered by the Zeroconf component. It will check if the
host is already configured and delegate to the import step if not.
"""
# abort if discovery info is not what we expect
if "server_id" not in discovery_info.properties:
return self.async_abort(reason="missing_server_id")

self.server_info = ServerInfoMessage.from_dict(discovery_info.properties)
await self.async_set_unique_id(self.server_info.server_id)

# Check if we already have a config entry for this server_id
existing_entry = self.hass.config_entries.async_entry_for_domain_unique_id(
DOMAIN, self.server_info.server_id
)

if existing_entry:
# If the entry was ignored or disabled, don't make any changes
if existing_entry.source == SOURCE_IGNORE or existing_entry.disabled_by:
return self.async_abort(reason="already_configured")

# Test connectivity to the current URL first
current_url = existing_entry.data[CONF_URL]
try:
await get_server_info(self.hass, current_url)
# Current URL is working, no need to update
return self.async_abort(reason="already_configured")
except CannotConnect:
# Current URL is not working, update to the discovered URL
# and continue to discovery confirm
self.hass.config_entries.async_update_entry(
existing_entry,
data={**existing_entry.data, CONF_URL: self.server_info.base_url},
)
# Schedule reload since URL changed
self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
else:
# No existing entry, proceed with normal flow
self._abort_if_unique_id_configured()

# Test connectivity to the discovered URL
"""Handle a zeroconf discovery for a Music Assistant server."""
try:
await get_server_info(self.hass, self.server_info.base_url)
server_info = ServerInfoMessage.from_dict(discovery_info.properties)
except LookupError:
return self.async_abort(reason="invalid_discovery_info")

self.url = server_info.base_url

await self.async_set_unique_id(server_info.server_id)
self._abort_if_unique_id_configured(updates={CONF_URL: self.url})

try:
await _get_server_info(self.hass, self.url)
except CannotConnect:
return self.async_abort(reason="cannot_connect")

return await self.async_step_discovery_confirm()

async def async_step_discovery_confirm(
@@ -148,16 +114,16 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle user-confirmation of discovered server."""
if TYPE_CHECKING:
assert self.server_info is not None
assert self.url is not None

if user_input is not None:
return self.async_create_entry(
title=DEFAULT_TITLE,
data={
CONF_URL: self.server_info.base_url,
},
data={CONF_URL: self.url},
)

self._set_confirm_only()
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={"url": self.server_info.base_url},
description_placeholders={"url": self.url},
)

@@ -2,7 +2,7 @@
"domain": "music_assistant",
"name": "Music Assistant",
"after_dependencies": ["media_source", "media_player"],
"codeowners": ["@music-assistant"],
"codeowners": ["@music-assistant", "@arturpragacz"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/music_assistant",
"iot_class": "local_push",

@@ -57,7 +57,7 @@
"message": "Error while loading the integration."
},
"implementation_unavailable": {
"message": "OAuth2 implementation is not available, will retry."
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"incorrect_oauth2_scope": {
"message": "Stored permissions are invalid. Please login again to update permissions."

@@ -12,7 +12,12 @@ from homeassistant.helpers import entity_registry as er

from .const import _LOGGER

PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE]
PLATFORMS: list[Platform] = [
Platform.CLIMATE,
Platform.COVER,
Platform.LIGHT,
Platform.SCENE,
]

type NikoHomeControlConfigEntry = ConfigEntry[NHCController]

homeassistant/components/niko_home_control/climate.py (new file)
@@ -0,0 +1,100 @@
"""Support for Niko Home Control thermostats."""

from typing import Any

from nhc.const import THERMOSTAT_MODES, THERMOSTAT_MODES_REVERSE
from nhc.thermostat import NHCThermostat

from homeassistant.components.climate import (
PRESET_ECO,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.components.sensor import UnitOfTemperature
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import NikoHomeControlConfigEntry
from .const import (
NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP,
NikoHomeControlThermostatModes,
)
from .entity import NikoHomeControlEntity

async def async_setup_entry(
hass: HomeAssistant,
entry: NikoHomeControlConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Niko Home Control thermostat entry."""
controller = entry.runtime_data

async_add_entities(
NikoHomeControlClimate(thermostat, controller, entry.entry_id)
for thermostat in controller.thermostats.values()
)

class NikoHomeControlClimate(NikoHomeControlEntity, ClimateEntity):
"""Representation of a Niko Home Control thermostat."""

_attr_supported_features: ClimateEntityFeature = (
ClimateEntityFeature.PRESET_MODE
| ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TURN_OFF
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_name = None
_action: NHCThermostat

_attr_translation_key = "nhc_thermostat"

_attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.AUTO]

_attr_preset_modes = [
"day",
"night",
PRESET_ECO,
"prog1",
"prog2",
"prog3",
]

def _get_niko_mode(self, mode: str) -> int:
"""Return the Niko mode."""
return THERMOSTAT_MODES_REVERSE.get(mode, NikoHomeControlThermostatModes.OFF)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if ATTR_TEMPERATURE in kwargs:
await self._action.set_temperature(kwargs.get(ATTR_TEMPERATURE))

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self._action.set_mode(self._get_niko_mode(preset_mode))

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
await self._action.set_mode(NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP[hvac_mode])

async def async_turn_off(self) -> None:
"""Turn thermostat off."""
await self._action.set_mode(NikoHomeControlThermostatModes.OFF)

def update_state(self) -> None:
"""Update the state of the entity."""
if self._action.state == NikoHomeControlThermostatModes.OFF:
self._attr_hvac_mode = HVACMode.OFF
self._attr_preset_mode = None
elif self._action.state == NikoHomeControlThermostatModes.COOL:
self._attr_hvac_mode = HVACMode.COOL
self._attr_preset_mode = None
else:
self._attr_hvac_mode = HVACMode.AUTO
self._attr_preset_mode = THERMOSTAT_MODES[self._action.state]

self._attr_target_temperature = self._action.setpoint
self._attr_current_temperature = self._action.measured

@@ -1,6 +1,23 @@
"""Constants for niko_home_control integration."""

from enum import IntEnum
import logging

from homeassistant.components.climate import HVACMode

DOMAIN = "niko_home_control"
_LOGGER = logging.getLogger(__name__)

NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP = {
HVACMode.OFF: 3,
HVACMode.COOL: 4,
HVACMode.AUTO: 5,
}

class NikoHomeControlThermostatModes(IntEnum):
"""Enum for Niko Home Control thermostat modes."""

OFF = 3
COOL = 4
AUTO = 5

homeassistant/components/niko_home_control/icons.json (new file)
@@ -0,0 +1,20 @@
{
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"default": "mdi:calendar-clock",
"state": {
"day": "mdi:weather-sunny",
"night": "mdi:weather-night",
"prog1": "mdi:numeric-1",
"prog2": "mdi:numeric-2",
"prog3": "mdi:numeric-3"
}
}
}
}
}
}
}

@@ -26,5 +26,23 @@
"description": "Set up your Niko Home Control instance."
}
}
},
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"state": {
"day": "Day",
"eco": "Eco",
"night": "Night",
"prog1": "Program 1",
"prog2": "Program 2",
"prog3": "Program 3"
}
}
}
}
}
}
}

@@ -15,5 +15,5 @@
"documentation": "https://www.home-assistant.io/integrations/palazzetti",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["pypalazzetti==0.1.19"]
"requirements": ["pypalazzetti==0.1.20"]
}

@@ -256,6 +256,7 @@ class PlaystationNetworkFriendDataCoordinator(
account_id=self.user.account_id,
presence=self.user.get_presence(),
profile=self.profile,
trophy_summary=self.user.trophy_summary(),
)
except PSNAWPForbiddenError as error:
raise UpdateFailed(

@@ -54,7 +54,7 @@ class PlaystationNetworkSensor(StrEnum):
|
||||
NOW_PLAYING = "now_playing"
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS_TROPHY: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
|
||||
SENSOR_DESCRIPTIONS: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
|
||||
PlaystationNetworkSensorEntityDescription(
|
||||
key=PlaystationNetworkSensor.TROPHY_LEVEL,
|
||||
translation_key=PlaystationNetworkSensor.TROPHY_LEVEL,
|
||||
@@ -106,8 +106,6 @@ SENSOR_DESCRIPTIONS_TROPHY: tuple[PlaystationNetworkSensorEntityDescription, ...
|
||||
else None
|
||||
),
|
||||
),
|
||||
)
|
||||
SENSOR_DESCRIPTIONS_USER: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
|
||||
PlaystationNetworkSensorEntityDescription(
|
||||
key=PlaystationNetworkSensor.ONLINE_ID,
|
||||
translation_key=PlaystationNetworkSensor.ONLINE_ID,
|
||||
@@ -152,7 +150,7 @@ async def async_setup_entry(
|
||||
coordinator = config_entry.runtime_data.user_data
|
||||
async_add_entities(
|
||||
PlaystationNetworkSensorEntity(coordinator, description)
|
||||
for description in SENSOR_DESCRIPTIONS_TROPHY + SENSOR_DESCRIPTIONS_USER
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
)
|
||||
|
||||
for (
|
||||
@@ -166,7 +164,7 @@ async def async_setup_entry(
|
||||
description,
|
||||
config_entry.subentries[subentry_id],
|
||||
)
|
||||
for description in SENSOR_DESCRIPTIONS_USER
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
],
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
@@ -57,12 +57,14 @@ type SelectType = Literal[
|
||||
"select_gateway_mode",
|
||||
"select_regulation_mode",
|
||||
"select_schedule",
|
||||
"select_zone_profile",
|
||||
]
|
||||
type SelectOptionsType = Literal[
|
||||
"available_schedules",
|
||||
"dhw_modes",
|
||||
"gateway_modes",
|
||||
"regulation_modes",
|
||||
"available_schedules",
|
||||
"zone_profiles",
|
||||
]
|
||||
|
||||
# Default directives
|
||||
@@ -82,3 +84,10 @@ MASTER_THERMOSTATS: Final[list[str]] = [
|
||||
"zone_thermometer",
|
||||
"zone_thermostat",
|
||||
]
|
||||
|
||||
# Select constants
|
||||
SELECT_DHW_MODE: Final = "select_dhw_mode"
|
||||
SELECT_GATEWAY_MODE: Final = "select_gateway_mode"
|
||||
SELECT_REGULATION_MODE: Final = "select_regulation_mode"
|
||||
SELECT_SCHEDULE: Final = "select_schedule"
|
||||
SELECT_ZONE_PROFILE: Final = "select_zone_profile"
|
||||
|
||||
@@ -8,6 +8,6 @@
"iot_class": "local_polling",
"loggers": ["plugwise"],
"quality_scale": "platinum",
"requirements": ["plugwise==1.9.0"],
"requirements": ["plugwise==1.10.0"],
"zeroconf": ["_plugwise._tcp.local."]
}

@@ -9,7 +9,15 @@ from homeassistant.const import STATE_ON, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import SelectOptionsType, SelectType
|
||||
from .const import (
|
||||
SELECT_DHW_MODE,
|
||||
SELECT_GATEWAY_MODE,
|
||||
SELECT_REGULATION_MODE,
|
||||
SELECT_SCHEDULE,
|
||||
SELECT_ZONE_PROFILE,
|
||||
SelectOptionsType,
|
||||
SelectType,
|
||||
)
|
||||
from .coordinator import PlugwiseConfigEntry, PlugwiseDataUpdateCoordinator
|
||||
from .entity import PlugwiseEntity
|
||||
from .util import plugwise_command
|
||||
@@ -27,28 +35,34 @@ class PlugwiseSelectEntityDescription(SelectEntityDescription):
|
||||
|
||||
SELECT_TYPES = (
|
||||
PlugwiseSelectEntityDescription(
|
||||
key="select_schedule",
|
||||
translation_key="select_schedule",
|
||||
key=SELECT_SCHEDULE,
|
||||
translation_key=SELECT_SCHEDULE,
|
||||
options_key="available_schedules",
|
||||
),
|
||||
PlugwiseSelectEntityDescription(
|
||||
key="select_regulation_mode",
|
||||
translation_key="regulation_mode",
|
||||
key=SELECT_REGULATION_MODE,
|
||||
translation_key=SELECT_REGULATION_MODE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options_key="regulation_modes",
|
||||
),
|
||||
PlugwiseSelectEntityDescription(
|
||||
key="select_dhw_mode",
|
||||
translation_key="dhw_mode",
|
||||
key=SELECT_DHW_MODE,
|
||||
translation_key=SELECT_DHW_MODE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options_key="dhw_modes",
|
||||
),
|
||||
PlugwiseSelectEntityDescription(
|
||||
key="select_gateway_mode",
|
||||
translation_key="gateway_mode",
|
||||
key=SELECT_GATEWAY_MODE,
|
||||
translation_key=SELECT_GATEWAY_MODE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options_key="gateway_modes",
|
||||
),
|
||||
PlugwiseSelectEntityDescription(
|
||||
key=SELECT_ZONE_PROFILE,
|
||||
translation_key=SELECT_ZONE_PROFILE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options_key="zone_profiles",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -109,7 +109,7 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"dhw_mode": {
|
||||
"select_dhw_mode": {
|
||||
"name": "DHW mode",
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
@@ -118,7 +118,7 @@
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"gateway_mode": {
|
||||
"select_gateway_mode": {
|
||||
"name": "Gateway mode",
|
||||
"state": {
|
||||
"away": "Pause",
|
||||
@@ -126,7 +126,7 @@
|
||||
"vacation": "Vacation"
|
||||
}
|
||||
},
|
||||
"regulation_mode": {
|
||||
"select_regulation_mode": {
|
||||
"name": "Regulation mode",
|
||||
"state": {
|
||||
"bleeding_cold": "Bleeding cold",
|
||||
@@ -141,6 +141,14 @@
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"select_zone_profile": {
|
||||
"name": "Zone profile",
|
||||
"state": {
|
||||
"active": "[%key:common::state::active%]",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"passive": "Passive"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
|
||||
@@ -26,6 +26,9 @@ def validate_db_schema(instance: Recorder) -> set[str]:
schema_errors |= validate_table_schema_supports_utf8(
instance, StatisticsMeta, (StatisticsMeta.statistic_id,)
)
schema_errors |= validate_table_schema_has_correct_collation(
instance, StatisticsMeta
)
for table in (Statistics, StatisticsShortTerm):
schema_errors |= validate_db_schema_precision(instance, table)
schema_errors |= validate_table_schema_has_correct_collation(instance, table)

@@ -54,7 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
CIRCULAR_MEAN_SCHEMA_VERSION = 49
UNIT_CLASS_SCHEMA_VERSION = 51
UNIT_CLASS_SCHEMA_VERSION = 52

LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43

@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""

SCHEMA_VERSION = 51
SCHEMA_VERSION = 52

_LOGGER = logging.getLogger(__name__)

@@ -13,7 +13,15 @@ from typing import TYPE_CHECKING, Any, TypedDict, cast, final
|
||||
from uuid import UUID
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update
|
||||
from sqlalchemy import (
|
||||
ForeignKeyConstraint,
|
||||
MetaData,
|
||||
Table,
|
||||
cast as cast_,
|
||||
func,
|
||||
text,
|
||||
update,
|
||||
)
|
||||
from sqlalchemy.engine import CursorResult, Engine
|
||||
from sqlalchemy.exc import (
|
||||
DatabaseError,
|
||||
@@ -26,8 +34,9 @@ from sqlalchemy.exc import (
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
from sqlalchemy.orm.session import Session
|
||||
from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
|
||||
from sqlalchemy.sql.expression import true
|
||||
from sqlalchemy.sql.expression import and_, true
|
||||
from sqlalchemy.sql.lambdas import StatementLambdaElement
|
||||
from sqlalchemy.types import BINARY
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
@@ -2044,14 +2053,74 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
|
||||
class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
|
||||
def _apply_update(self) -> None:
|
||||
"""Version specific update method."""
|
||||
# Add unit class column to StatisticsMeta
|
||||
# Replaced with version 52 which corrects issues with MySQL string comparisons.
|
||||
|
||||
|
||||
class _SchemaVersion52Migrator(_SchemaVersionMigrator, target_version=52):
|
||||
def _apply_update(self) -> None:
|
||||
"""Version specific update method."""
|
||||
if self.engine.dialect.name == SupportedDialect.MYSQL:
|
||||
self._apply_update_mysql()
|
||||
else:
|
||||
self._apply_update_postgresql_sqlite()
|
||||
|
||||
def _apply_update_mysql(self) -> None:
|
||||
"""Version specific update method for mysql."""
|
||||
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
|
||||
with session_scope(session=self.session_maker()) as session:
|
||||
connection = session.connection()
|
||||
for conv in _PRIMARY_UNIT_CONVERTERS:
|
||||
case_sensitive_units = {
|
||||
u.encode("utf-8") if u else u for u in conv.VALID_UNITS
|
||||
}
|
||||
# Reset unit_class to None for entries that do not match
|
||||
# the valid units (case sensitive) but matched before due to
|
||||
# case insensitive comparisons.
|
||||
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
|
||||
.where(
|
||||
and_(
|
||||
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
|
||||
cast_(StatisticsMeta.unit_of_measurement, BINARY).not_in(
|
||||
case_sensitive_units
|
||||
),
|
||||
)
|
||||
)
|
||||
.values(unit_class=None)
|
||||
)
|
||||
# Do an explicitly case sensitive match (actually binary) to set the
|
||||
# correct unit_class. This is needed because we use the case sensitive
|
||||
# utf8mb4_unicode_ci collation.
|
||||
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(
|
||||
and_(
|
||||
cast_(StatisticsMeta.unit_of_measurement, BINARY).in_(
|
||||
case_sensitive_units
|
||||
),
|
||||
StatisticsMeta.unit_class.is_(None),
|
||||
)
|
||||
)
|
||||
.values(unit_class=conv.UNIT_CLASS)
|
||||
)
|
||||
|
||||
def _apply_update_postgresql_sqlite(self) -> None:
|
||||
"""Version specific update method for postgresql and sqlite."""
|
||||
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
|
||||
with session_scope(session=self.session_maker()) as session:
|
||||
connection = session.connection()
|
||||
for conv in _PRIMARY_UNIT_CONVERTERS:
|
||||
# Set the correct unit_class. Unlike MySQL, Postgres and SQLite
|
||||
# have case sensitive string comparisons by default, so we
|
||||
# can directly match on the valid units.
|
||||
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(
|
||||
and_(
|
||||
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
|
||||
StatisticsMeta.unit_class.is_(None),
|
||||
)
|
||||
)
|
||||
.values(unit_class=conv.UNIT_CLASS)
|
||||
)
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ CACHE_SIZE = 8192

_LOGGER = logging.getLogger(__name__)

QUERY_STATISTIC_META = (
QUERY_STATISTICS_META = (
StatisticsMeta.id,
StatisticsMeta.statistic_id,
StatisticsMeta.source,
@@ -55,7 +55,7 @@ def _generate_get_metadata_stmt(

Depending on the schema version, either mean_type (added in version 49) or has_mean column is used.
"""
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META)
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTICS_META)
if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
columns.append(StatisticsMeta.mean_type)
else:

@@ -2,12 +2,15 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from satel_integra.satel_integra import AsyncSatel
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -17,6 +20,7 @@ from .const import (
|
||||
CONF_ZONE_NUMBER,
|
||||
CONF_ZONE_TYPE,
|
||||
CONF_ZONES,
|
||||
DOMAIN,
|
||||
SIGNAL_OUTPUTS_UPDATED,
|
||||
SIGNAL_ZONES_UPDATED,
|
||||
SUBENTRY_TYPE_OUTPUT,
|
||||
@@ -40,9 +44,9 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
for subentry in zone_subentries:
|
||||
zone_num = subentry.data[CONF_ZONE_NUMBER]
|
||||
zone_type = subentry.data[CONF_ZONE_TYPE]
|
||||
zone_name = subentry.data[CONF_NAME]
|
||||
zone_num: int = subentry.data[CONF_ZONE_NUMBER]
|
||||
zone_type: BinarySensorDeviceClass = subentry.data[CONF_ZONE_TYPE]
|
||||
zone_name: str = subentry.data[CONF_NAME]
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
@@ -65,9 +69,9 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
for subentry in output_subentries:
|
||||
output_num = subentry.data[CONF_OUTPUT_NUMBER]
|
||||
ouput_type = subentry.data[CONF_ZONE_TYPE]
|
||||
output_name = subentry.data[CONF_NAME]
|
||||
output_num: int = subentry.data[CONF_OUTPUT_NUMBER]
|
||||
ouput_type: BinarySensorDeviceClass = subentry.data[CONF_ZONE_TYPE]
|
||||
output_name: str = subentry.data[CONF_NAME]
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
@@ -89,68 +93,48 @@ class SatelIntegraBinarySensor(BinarySensorEntity):
|
||||
"""Representation of an Satel Integra binary sensor."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
controller,
|
||||
device_number,
|
||||
device_name,
|
||||
zone_type,
|
||||
sensor_type,
|
||||
react_to_signal,
|
||||
config_entry_id,
|
||||
):
|
||||
controller: AsyncSatel,
|
||||
device_number: int,
|
||||
device_name: str,
|
||||
device_class: BinarySensorDeviceClass,
|
||||
sensor_type: str,
|
||||
react_to_signal: str,
|
||||
config_entry_id: str,
|
||||
) -> None:
|
||||
"""Initialize the binary_sensor."""
|
||||
self._device_number = device_number
|
||||
self._attr_unique_id = f"{config_entry_id}_{sensor_type}_{device_number}"
|
||||
self._name = device_name
|
||||
self._zone_type = zone_type
|
||||
self._state = 0
|
||||
self._react_to_signal = react_to_signal
|
||||
self._satel = controller
|
||||
|
||||
self._attr_device_class = device_class
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=device_name, identifiers={(DOMAIN, self._attr_unique_id)}
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED:
|
||||
if self._device_number in self._satel.violated_outputs:
|
||||
self._state = 1
|
||||
else:
|
||||
self._state = 0
|
||||
elif self._device_number in self._satel.violated_zones:
|
||||
self._state = 1
|
||||
self._attr_is_on = self._device_number in self._satel.violated_outputs
|
||||
else:
|
||||
self._state = 0
|
||||
self._attr_is_on = self._device_number in self._satel.violated_zones
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass, self._react_to_signal, self._devices_updated
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the entity."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def icon(self) -> str | None:
|
||||
"""Icon for device by its type."""
|
||||
if self._zone_type is BinarySensorDeviceClass.SMOKE:
|
||||
return "mdi:fire"
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if sensor is on."""
|
||||
return self._state == 1
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of this sensor, from DEVICE_CLASSES."""
|
||||
return self._zone_type
|
||||
|
||||
@callback
|
||||
def _devices_updated(self, zones):
|
||||
def _devices_updated(self, zones: dict[int, int]):
|
||||
"""Update the zone's state, if needed."""
|
||||
if self._device_number in zones and self._state != zones[self._device_number]:
|
||||
self._state = zones[self._device_number]
|
||||
self.async_write_ha_state()
|
||||
if self._device_number in zones:
|
||||
new_state = zones[self._device_number] == 1
|
||||
if new_state != self._attr_is_on:
|
||||
self._attr_is_on = new_state
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -12,10 +12,11 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_entry_oauth2_flow,
|
||||
config_validation as cv,
|
||||
httpx_client,
|
||||
from homeassistant.helpers import config_validation as cv, httpx_client
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -28,19 +29,21 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE]
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
|
||||
|
||||
type SENZDataUpdateCoordinator = DataUpdateCoordinator[dict[str, Thermostat]]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up SENZ from a config entry."""
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
try:
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
auth = SENZConfigEntryAuth(httpx_client.get_async_client(hass), session)
|
||||
senz_api = SENZAPI(auth)
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ async def async_setup_entry(
)

class SENZClimate(CoordinatorEntity, ClimateEntity):
class SENZClimate(CoordinatorEntity[SENZDataUpdateCoordinator], ClimateEntity):
"""Representation of a SENZ climate entity."""

_attr_temperature_unit = UnitOfTemperature.CELSIUS

homeassistant/components/senz/diagnostics.py (new file)
@@ -0,0 +1,29 @@
|
||||
"""Diagnostics platform for Senz integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TO_REDACT = [
|
||||
"access_token",
|
||||
"refresh_token",
|
||||
]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
raw_data = (
|
||||
[device.raw_data for device in hass.data[DOMAIN][entry.entry_id].data.values()],
|
||||
)
|
||||
|
||||
return {
|
||||
"entry_data": async_redact_data(entry.data, TO_REDACT),
|
||||
"thermostats": raw_data,
|
||||
}
|
||||
homeassistant/components/senz/sensor.py (new file)
@@ -0,0 +1,93 @@
|
||||
"""nVent RAYCHEM SENZ sensor platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from aiosenz import Thermostat
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import SENZDataUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class SenzSensorDescription(SensorEntityDescription):
|
||||
"""Describes SENZ sensor entity."""
|
||||
|
||||
value_fn: Callable[[Thermostat], str | int | float | None]
|
||||
|
||||
|
||||
SENSORS: tuple[SenzSensorDescription, ...] = (
|
||||
SenzSensorDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda data: data.current_temperatue,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the SENZ sensor entities from a config entry."""
|
||||
coordinator: SENZDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities(
|
||||
SENZSensor(thermostat, coordinator, description)
|
||||
for description in SENSORS
|
||||
for thermostat in coordinator.data.values()
|
||||
)
|
||||
|
||||
|
||||
class SENZSensor(CoordinatorEntity[SENZDataUpdateCoordinator], SensorEntity):
|
||||
"""Representation of a SENZ sensor entity."""
|
||||
|
||||
entity_description: SenzSensorDescription
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
thermostat: Thermostat,
|
||||
coordinator: SENZDataUpdateCoordinator,
|
||||
description: SenzSensorDescription,
|
||||
) -> None:
|
||||
"""Init SENZ sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._thermostat = thermostat
|
||||
self._attr_unique_id = f"{thermostat.serial_number}_{description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, thermostat.serial_number)},
|
||||
manufacturer="nVent Raychem",
|
||||
model="SENZ WIFI",
|
||||
name=thermostat.name,
|
||||
serial_number=thermostat.serial_number,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if the thermostat is available."""
|
||||
return super().available and self._thermostat.online
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | float | int | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self._thermostat)
|
||||
@@ -25,5 +25,10 @@
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
}
}
},
"exceptions": {
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
}
}
}

@@ -12,6 +12,7 @@ from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError
|
||||
|
||||
from homeassistant.components.number import (
|
||||
DOMAIN as NUMBER_PLATFORM,
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
NumberExtraStoredData,
|
||||
@@ -107,6 +108,9 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity):
|
||||
if description.mode_fn is not None:
|
||||
self._attr_mode = description.mode_fn(coordinator.device.config[key])
|
||||
|
||||
if hasattr(self, "_attr_name") and description.role != ROLE_GENERIC:
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return value of number."""
|
||||
@@ -181,7 +185,6 @@ NUMBERS: dict[tuple[str, str], BlockNumberDescription] = {
|
||||
("device", "valvePos"): BlockNumberDescription(
|
||||
key="device|valvepos",
|
||||
translation_key="valve_position",
|
||||
name="Valve position",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
available=lambda block: cast(int, block.valveError) != 1,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
@@ -200,12 +203,12 @@ RPC_NUMBERS: Final = {
|
||||
key="blutrv",
|
||||
sub_key="current_C",
|
||||
translation_key="external_temperature",
|
||||
name="External temperature",
|
||||
native_min_value=-50,
|
||||
native_max_value=50,
|
||||
native_step=0.1,
|
||||
mode=NumberMode.BOX,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
method="blu_trv_set_external_temperature",
|
||||
entity_class=RpcBluTrvExtTempNumber,
|
||||
@@ -213,7 +216,7 @@ RPC_NUMBERS: Final = {
|
||||
"number_generic": RpcNumberDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
removal_condition=lambda config, _status, key: not is_view_for_platform(
|
||||
removal_condition=lambda config, _, key: not is_view_for_platform(
|
||||
config, key, NUMBER_PLATFORM
|
||||
),
|
||||
max_fn=lambda config: config["max"],
|
||||
@@ -229,9 +232,11 @@ RPC_NUMBERS: Final = {
|
||||
"number_current_limit": RpcNumberDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
translation_key="current_limit",
|
||||
device_class=NumberDeviceClass.CURRENT,
|
||||
max_fn=lambda config: config["max"],
|
||||
min_fn=lambda config: config["min"],
|
||||
mode_fn=lambda config: NumberMode.SLIDER,
|
||||
mode_fn=lambda _: NumberMode.SLIDER,
|
||||
step_fn=lambda config: config["meta"]["ui"].get("step"),
|
||||
unit=get_virtual_component_unit,
|
||||
method="number_set",
|
||||
@@ -241,10 +246,11 @@ RPC_NUMBERS: Final = {
|
||||
"number_position": RpcNumberDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
translation_key="valve_position",
|
||||
entity_registry_enabled_default=False,
|
||||
max_fn=lambda config: config["max"],
|
||||
min_fn=lambda config: config["min"],
|
||||
mode_fn=lambda config: NumberMode.SLIDER,
|
||||
mode_fn=lambda _: NumberMode.SLIDER,
|
||||
step_fn=lambda config: config["meta"]["ui"].get("step"),
|
||||
unit=get_virtual_component_unit,
|
||||
method="number_set",
|
||||
@@ -254,10 +260,12 @@ RPC_NUMBERS: Final = {
|
||||
"number_target_humidity": RpcNumberDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
translation_key="target_humidity",
|
||||
device_class=NumberDeviceClass.HUMIDITY,
|
||||
entity_registry_enabled_default=False,
|
||||
max_fn=lambda config: config["max"],
|
||||
min_fn=lambda config: config["min"],
|
||||
mode_fn=lambda config: NumberMode.SLIDER,
|
||||
mode_fn=lambda _: NumberMode.SLIDER,
|
||||
step_fn=lambda config: config["meta"]["ui"].get("step"),
|
||||
unit=get_virtual_component_unit,
|
||||
method="number_set",
|
||||
@@ -267,10 +275,12 @@ RPC_NUMBERS: Final = {
|
||||
"number_target_temperature": RpcNumberDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
translation_key="target_temperature",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
entity_registry_enabled_default=False,
|
||||
max_fn=lambda config: config["max"],
|
||||
min_fn=lambda config: config["min"],
|
||||
mode_fn=lambda config: NumberMode.SLIDER,
|
||||
mode_fn=lambda _: NumberMode.SLIDER,
|
||||
step_fn=lambda config: config["meta"]["ui"].get("step"),
|
||||
unit=get_virtual_component_unit,
|
||||
method="number_set",
|
||||
@@ -281,21 +291,20 @@ RPC_NUMBERS: Final = {
|
||||
key="blutrv",
|
||||
sub_key="pos",
|
||||
translation_key="valve_position",
|
||||
name="Valve position",
|
||||
native_min_value=0,
|
||||
native_max_value=100,
|
||||
native_step=1,
|
||||
mode=NumberMode.SLIDER,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
method="blu_trv_set_valve_position",
|
||||
removal_condition=lambda config, _status, key: config[key].get("enable", True)
|
||||
removal_condition=lambda config, _, key: config[key].get("enable", True)
|
||||
is True,
|
||||
entity_class=RpcBluTrvNumber,
|
||||
),
|
||||
"left_slot_intensity": RpcNumberDescription(
|
||||
key="cury",
|
||||
sub_key="slots",
|
||||
name="Left slot intensity",
|
||||
translation_key="left_slot_intensity",
|
||||
value=lambda status, _: status["left"]["intensity"],
|
||||
native_min_value=0,
|
||||
native_max_value=100,
|
||||
@@ -311,7 +320,7 @@ RPC_NUMBERS: Final = {
|
||||
"right_slot_intensity": RpcNumberDescription(
|
||||
key="cury",
|
||||
sub_key="slots",
|
||||
name="Right slot intensity",
|
||||
translation_key="right_slot_intensity",
|
||||
value=lambda status, _: status["right"]["intensity"],
|
||||
native_min_value=0,
|
||||
native_max_value=100,
|
||||
@@ -402,6 +411,9 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber):
|
||||
self.restored_data: NumberExtraStoredData | None = None
|
||||
super().__init__(coordinator, block, attribute, description, entry)
|
||||
|
||||
if hasattr(self, "_attr_name"):
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@@ -188,6 +188,29 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"current_limit": {
|
||||
"name": "Current limit"
|
||||
},
|
||||
"external_temperature": {
|
||||
"name": "External temperature"
|
||||
},
|
||||
"left_slot_intensity": {
|
||||
"name": "Left slot intensity"
|
||||
},
|
||||
"right_slot_intensity": {
|
||||
"name": "Right slot intensity"
|
||||
},
|
||||
"target_humidity": {
|
||||
"name": "Target humidity"
|
||||
},
|
||||
"target_temperature": {
|
||||
"name": "Target temperature"
|
||||
},
|
||||
"valve_position": {
|
||||
"name": "Valve position"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"cury_mode": {
|
||||
"name": "Mode",
|
||||
|
||||
@@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.3.1"]
"requirements": ["pysmartthings==3.3.2"]
}

@@ -75,6 +75,7 @@ PLATFORMS_BY_TYPE = {
|
||||
SupportedModels.HUBMINI_MATTER.value: [Platform.SENSOR],
|
||||
SupportedModels.CIRCULATOR_FAN.value: [Platform.FAN, Platform.SENSOR],
|
||||
SupportedModels.S10_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
|
||||
SupportedModels.S20_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
|
||||
SupportedModels.K10_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
|
||||
SupportedModels.K10_PRO_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
|
||||
SupportedModels.K10_PRO_COMBO_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
|
||||
@@ -102,6 +103,10 @@ PLATFORMS_BY_TYPE = {
|
||||
SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR],
|
||||
SupportedModels.GARAGE_DOOR_OPENER.value: [Platform.COVER, Platform.SENSOR],
|
||||
SupportedModels.CLIMATE_PANEL.value: [Platform.SENSOR, Platform.BINARY_SENSOR],
|
||||
SupportedModels.SMART_THERMOSTAT_RADIATOR.value: [
|
||||
Platform.CLIMATE,
|
||||
Platform.SENSOR,
|
||||
],
|
||||
}
|
||||
CLASS_BY_DEVICE = {
|
||||
SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
|
||||
@@ -119,6 +124,7 @@ CLASS_BY_DEVICE = {
|
||||
SupportedModels.ROLLER_SHADE.value: switchbot.SwitchbotRollerShade,
|
||||
SupportedModels.CIRCULATOR_FAN.value: switchbot.SwitchbotFan,
|
||||
SupportedModels.S10_VACUUM.value: switchbot.SwitchbotVacuum,
|
||||
SupportedModels.S20_VACUUM.value: switchbot.SwitchbotVacuum,
|
||||
SupportedModels.K10_VACUUM.value: switchbot.SwitchbotVacuum,
|
||||
SupportedModels.K10_PRO_VACUUM.value: switchbot.SwitchbotVacuum,
|
||||
SupportedModels.K10_PRO_COMBO_VACUUM.value: switchbot.SwitchbotVacuum,
|
||||
@@ -136,6 +142,7 @@ CLASS_BY_DEVICE = {
|
||||
SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch,
|
||||
SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM,
|
||||
SupportedModels.GARAGE_DOOR_OPENER.value: switchbot.SwitchbotGarageDoorOpener,
|
||||
SupportedModels.SMART_THERMOSTAT_RADIATOR.value: switchbot.SwitchbotSmartThermostatRadiator,
|
||||
}
|
||||
|
||||
|
||||
|
||||
homeassistant/components/switchbot/climate.py (new file)
@@ -0,0 +1,140 @@
|
||||
"""Support for Switchbot Climate devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import switchbot
|
||||
from switchbot import (
|
||||
ClimateAction as SwitchBotClimateAction,
|
||||
ClimateMode as SwitchBotClimateMode,
|
||||
)
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import SwitchbotConfigEntry
|
||||
from .entity import SwitchbotEntity, exception_handler
|
||||
|
||||
SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE = {
|
||||
SwitchBotClimateMode.HEAT: HVACMode.HEAT,
|
||||
SwitchBotClimateMode.OFF: HVACMode.OFF,
|
||||
}
|
||||
|
||||
HASS_HVAC_MODE_TO_SWITCHBOT_CLIMATE = {
|
||||
HVACMode.HEAT: SwitchBotClimateMode.HEAT,
|
||||
HVACMode.OFF: SwitchBotClimateMode.OFF,
|
||||
}
|
||||
|
||||
SWITCHBOT_ACTION_TO_HASS_HVAC_ACTION = {
|
||||
SwitchBotClimateAction.HEATING: HVACAction.HEATING,
|
||||
SwitchBotClimateAction.IDLE: HVACAction.IDLE,
|
||||
SwitchBotClimateAction.OFF: HVACAction.OFF,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: SwitchbotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Switchbot climate based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities([SwitchBotClimateEntity(coordinator)])
|
||||
|
||||
|
||||
class SwitchBotClimateEntity(SwitchbotEntity, ClimateEntity):
|
||||
"""Representation of a Switchbot Climate device."""
|
||||
|
||||
_device: switchbot.SwitchbotDevice
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.PRESET_MODE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_attr_target_temperature_step = 0.5
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_translation_key = "climate"
|
||||
_attr_name = None
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature."""
|
||||
return self._device.min_temperature
|
||||
|
||||
@property
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature."""
|
||||
return self._device.max_temperature
|
||||
|
||||
@property
|
||||
def preset_modes(self) -> list[str] | None:
|
||||
"""Return the list of available preset modes."""
|
||||
return self._device.preset_modes
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return the current preset mode."""
|
||||
return self._device.preset_mode
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the current HVAC mode."""
|
||||
return SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE.get(
|
||||
self._device.hvac_mode, HVACMode.OFF
|
||||
)
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return the list of available HVAC modes."""
|
||||
return [
|
||||
SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE[mode]
|
||||
for mode in self._device.hvac_modes
|
||||
]
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return the current HVAC action."""
|
||||
return SWITCHBOT_ACTION_TO_HASS_HVAC_ACTION.get(
|
||||
self._device.hvac_action, HVACAction.OFF
|
||||
)
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._device.current_temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
return self._device.target_temperature
|
||||
|
||||
@exception_handler
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new HVAC mode."""
|
||||
return await self._device.set_hvac_mode(
|
||||
HASS_HVAC_MODE_TO_SWITCHBOT_CLIMATE[hvac_mode]
|
||||
)
|
||||
|
||||
@exception_handler
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
return await self._device.set_preset_mode(preset_mode)
|
||||
|
||||
@exception_handler
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs.get(ATTR_TEMPERATURE)
|
||||
return await self._device.set_target_temperature(temperature)
|
||||
@@ -58,6 +58,8 @@ class SupportedModels(StrEnum):
K11_PLUS_VACUUM = "k11+_vacuum"
GARAGE_DOOR_OPENER = "garage_door_opener"
CLIMATE_PANEL = "climate_panel"
SMART_THERMOSTAT_RADIATOR = "smart_thermostat_radiator"
S20_VACUUM = "s20_vacuum"


CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -78,6 +80,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
SwitchbotModel.CIRCULATOR_FAN: SupportedModels.CIRCULATOR_FAN,
SwitchbotModel.K20_VACUUM: SupportedModels.K20_VACUUM,
SwitchbotModel.S10_VACUUM: SupportedModels.S10_VACUUM,
SwitchbotModel.S20_VACUUM: SupportedModels.S20_VACUUM,
SwitchbotModel.K10_VACUUM: SupportedModels.K10_VACUUM,
SwitchbotModel.K10_PRO_VACUUM: SupportedModels.K10_PRO_VACUUM,
SwitchbotModel.K10_PRO_COMBO_VACUUM: SupportedModels.K10_PRO_COMBO_VACUUM,
@@ -95,6 +98,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
SwitchbotModel.K11_VACUUM: SupportedModels.K11_PLUS_VACUUM,
SwitchbotModel.GARAGE_DOOR_OPENER: SupportedModels.GARAGE_DOOR_OPENER,
SwitchbotModel.CLIMATE_PANEL: SupportedModels.CLIMATE_PANEL,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: SupportedModels.SMART_THERMOSTAT_RADIATOR,
}

NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -132,6 +136,7 @@ ENCRYPTED_MODELS = {
SwitchbotModel.PLUG_MINI_EU,
SwitchbotModel.RELAY_SWITCH_2PM,
SwitchbotModel.GARAGE_DOOR_OPENER,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR,
}

ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
@@ -153,6 +158,7 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch,
SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM,
SwitchbotModel.GARAGE_DOOR_OPENER: switchbot.SwitchbotRelaySwitch,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: switchbot.SwitchbotSmartThermostatRadiator,
}

HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {

@@ -1,5 +1,18 @@
{
"entity": {
"climate": {
"climate": {
"state_attributes": {
"preset_mode": {
"state": {
"manual": "mdi:hand-back-right",
"off": "mdi:hvac-off",
"schedule": "mdi:calendar-clock"
}
}
}
}
},
"fan": {
"air_purifier": {
"default": "mdi:air-purifier",

@@ -41,5 +41,5 @@
"iot_class": "local_push",
"loggers": ["switchbot"],
"quality_scale": "gold",
"requirements": ["PySwitchbot==0.72.1"]
"requirements": ["PySwitchbot==0.73.0"]
}

@@ -100,6 +100,19 @@
"name": "Unlocked alarm"
}
},
"climate": {
"climate": {
"state_attributes": {
"preset_mode": {
"state": {
"manual": "[%key:common::state::manual%]",
"off": "[%key:common::state::off%]",
"schedule": "Schedule"
}
}
}
}
},
"cover": {
"cover": {
"state_attributes": {

@@ -84,6 +84,7 @@
"abort": {
"already_configured": "Chat already configured"
},
"entry_type": "Allowed chat ID",
"error": {
"chat_not_found": "Chat not found"
},
@@ -181,15 +181,14 @@ class TPLinkClimateEntity(CoordinatedTPLinkModuleEntity, ClimateEntity):
HVACMode.HEAT if self._thermostat_module.state else HVACMode.OFF
)

if (
self._thermostat_module.mode not in STATE_TO_ACTION
and self._attr_hvac_action is not HVACAction.OFF
):
_LOGGER.warning(
"Unknown thermostat state, defaulting to OFF: %s",
self._thermostat_module.mode,
)
self._attr_hvac_action = HVACAction.OFF
if self._thermostat_module.mode not in STATE_TO_ACTION:
# Report a warning on the first non-default unknown mode
if self._attr_hvac_action is not HVACAction.OFF:
_LOGGER.warning(
"Unknown thermostat state, defaulting to OFF: %s",
self._thermostat_module.mode,
)
self._attr_hvac_action = HVACAction.OFF
return True

self._attr_hvac_action = STATE_TO_ACTION[self._thermostat_module.mode]
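The reworked TP-Link hunk only warns on the first sample of an unknown thermostat mode: once the action has been forced to OFF, the inner check stays false until a known mode is seen again. A small standalone sketch of that warn-once shape, assuming generic names rather than the integration's API:

import logging

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger(__name__)

STATE_TO_ACTION = {"heating": "heating", "idle": "idle"}

class ThermostatState:
    """Track the last reported action and warn once per unknown-mode streak."""

    def __init__(self) -> None:
        self.action = "idle"

    def update(self, mode: str) -> None:
        if mode not in STATE_TO_ACTION:
            if self.action != "off":
                # First unknown sample: warn, then stay quiet until recovery.
                _LOGGER.warning("Unknown thermostat state, defaulting to OFF: %s", mode)
            self.action = "off"
            return
        self.action = STATE_TO_ACTION[mode]

ts = ThermostatState()
ts.update("mystery")  # logs once
ts.update("mystery")  # silent, already off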
@@ -2,13 +2,14 @@

from functools import partial
import logging
from typing import cast

import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, selector

from .const import (
@@ -23,7 +24,7 @@ from .const import (
SERVICE_START_TORRENT,
SERVICE_STOP_TORRENT,
)
from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator
from .coordinator import TransmissionDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

@@ -67,45 +68,52 @@ SERVICE_STOP_TORRENT_SCHEMA = vol.All(


def _get_coordinator_from_service_data(
hass: HomeAssistant, entry_id: str
call: ServiceCall,
) -> TransmissionDataUpdateCoordinator:
"""Return coordinator for entry id."""
entry: TransmissionConfigEntry | None = hass.config_entries.async_get_entry(
entry_id
)
if entry is None or entry.state is not ConfigEntryState.LOADED:
raise HomeAssistantError(f"Config entry {entry_id} is not found or not loaded")
return entry.runtime_data
config_entry_id: str = call.data[CONF_ENTRY_ID]
if not (entry := call.hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="integration_not_found",
translation_placeholders={"target": DOMAIN},
)
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_loaded",
translation_placeholders={"target": entry.title},
)
return cast(TransmissionDataUpdateCoordinator, entry.runtime_data)


async def _async_add_torrent(service: ServiceCall) -> None:
"""Add new torrent to download."""
entry_id: str = service.data[CONF_ENTRY_ID]
coordinator = _get_coordinator_from_service_data(service.hass, entry_id)
coordinator = _get_coordinator_from_service_data(service)
torrent: str = service.data[ATTR_TORRENT]
download_path: str | None = service.data.get(ATTR_DOWNLOAD_PATH)
if torrent.startswith(
("http", "ftp:", "magnet:")
) or service.hass.config.is_allowed_path(torrent):
if download_path:
await service.hass.async_add_executor_job(
partial(
coordinator.api.add_torrent, torrent, download_dir=download_path
)
)
else:
await service.hass.async_add_executor_job(
coordinator.api.add_torrent, torrent
)
await coordinator.async_request_refresh()

if not (
torrent.startswith(("http", "ftp:", "magnet:"))
or service.hass.config.is_allowed_path(torrent)
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="could_not_add_torrent",
)

if download_path:
await service.hass.async_add_executor_job(
partial(coordinator.api.add_torrent, torrent, download_dir=download_path)
)
else:
_LOGGER.warning("Could not add torrent: unsupported type or no permission")
await service.hass.async_add_executor_job(coordinator.api.add_torrent, torrent)
await coordinator.async_request_refresh()


async def _async_start_torrent(service: ServiceCall) -> None:
"""Start torrent."""
entry_id: str = service.data[CONF_ENTRY_ID]
coordinator = _get_coordinator_from_service_data(service.hass, entry_id)
coordinator = _get_coordinator_from_service_data(service)
torrent_id = service.data[CONF_ID]
await service.hass.async_add_executor_job(coordinator.api.start_torrent, torrent_id)
await coordinator.async_request_refresh()
@@ -113,8 +121,7 @@ async def _async_start_torrent(service: ServiceCall) -> None:

async def _async_stop_torrent(service: ServiceCall) -> None:
"""Stop torrent."""
entry_id: str = service.data[CONF_ENTRY_ID]
coordinator = _get_coordinator_from_service_data(service.hass, entry_id)
coordinator = _get_coordinator_from_service_data(service)
torrent_id = service.data[CONF_ID]
await service.hass.async_add_executor_job(coordinator.api.stop_torrent, torrent_id)
await coordinator.async_request_refresh()
@@ -122,8 +129,7 @@ async def _async_stop_torrent(service: ServiceCall) -> None:

async def _async_remove_torrent(service: ServiceCall) -> None:
"""Remove torrent."""
entry_id: str = service.data[CONF_ENTRY_ID]
coordinator = _get_coordinator_from_service_data(service.hass, entry_id)
coordinator = _get_coordinator_from_service_data(service)
torrent_id = service.data[CONF_ID]
delete_data = service.data[ATTR_DELETE_DATA]
await service.hass.async_add_executor_job(
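The new _get_coordinator_from_service_data resolves the targeted config entry from the service call itself and raises translated ServiceValidationErrors instead of a generic failure. The same lookup-and-validate shape, reduced to plain Python; the registry and error type here are stand-ins, not Home Assistant APIs:

class ValidationError(Exception):
    """Stand-in for a user-facing validation error."""

# Hypothetical registry: entry_id -> (title, loaded, runtime_data)
ENTRIES = {
    "abc123": ("Seedbox", True, object()),
    "def456": ("NAS", False, None),
}

def resolve_runtime_data(entry_id: str) -> object:
    if entry_id not in ENTRIES:
        raise ValidationError(f"Integration entry {entry_id} not found")
    title, loaded, runtime_data = ENTRIES[entry_id]
    if not loaded:
        raise ValidationError(f"{title} is not loaded")
    return runtime_data

resolve_runtime_data("abc123")      # ok
try:
    resolve_runtime_data("def456")  # raises: NAS is not loaded
except ValidationError as err:
    print(err)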
@@ -1,6 +1,7 @@
add_torrent:
fields:
entry_id:
required: true
selector:
config_entry:
integration: transmission
@@ -18,6 +19,7 @@ add_torrent:
remove_torrent:
fields:
entry_id:
required: true
selector:
config_entry:
integration: transmission
@@ -27,6 +29,7 @@ remove_torrent:
selector:
text:
delete_data:
required: true
default: false
selector:
boolean:
@@ -34,17 +37,20 @@ remove_torrent:
start_torrent:
fields:
entry_id:
selector:
config_entry:
integration: transmission
id:
example: 123
selector:
text:

stop_torrent:
fields:
entry_id:
required: true
selector:
config_entry:
integration: transmission
id:
required: true
example: 123
selector:
text:

stop_torrent:
fields:
entry_id:
required: true
selector:
config_entry:
integration: transmission

@@ -87,6 +87,17 @@
}
}
},
"exceptions": {
"could_not_add_torrent": {
"message": "Could not add torrent: unsupported type or no permission."
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."
},
"not_loaded": {
"message": "{target} is not loaded."
}
},
"options": {
"step": {
"init": {
@@ -19,9 +19,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import EnumTypeData, find_dpcode
from .models import DPCodeEnumWrapper
from .util import get_dpcode


@@ -85,9 +85,21 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := ALARM.get(device.category):
entities.extend(
TuyaAlarmEntity(device, manager, description)
TuyaAlarmEntity(
device,
manager,
description,
action_dpcode_wrapper=action_dpcode_wrapper,
state_dpcode_wrapper=DPCodeEnumWrapper.find_dpcode(
device, description.master_state
),
)
for description in descriptions
if description.key in device.status
if (
action_dpcode_wrapper := DPCodeEnumWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)
async_add_entities(entities)

@@ -103,7 +115,6 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):

_attr_name = None
_attr_code_arm_required = False
_master_state: EnumTypeData | None = None
_alarm_msg_dpcode: DPCode | None = None

def __init__(
@@ -111,33 +122,24 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
device: CustomerDevice,
device_manager: Manager,
description: TuyaAlarmControlPanelEntityDescription,
*,
action_dpcode_wrapper: DPCodeEnumWrapper,
state_dpcode_wrapper: DPCodeEnumWrapper | None,
) -> None:
"""Init Tuya Alarm."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._action_dpcode_wrapper = action_dpcode_wrapper
self._state_dpcode_wrapper = state_dpcode_wrapper

# Determine supported modes
if supported_modes := find_dpcode(
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
):
if Mode.HOME in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_HOME

if Mode.ARM in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_AWAY

if Mode.SOS in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER

# Determine master state
if enum_type := find_dpcode(
self.device,
description.master_state,
dptype=DPType.ENUM,
prefer_function=True,
):
self._master_state = enum_type
if Mode.HOME in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_HOME
if Mode.ARM in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_AWAY
if Mode.SOS in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER

# Determine alarm message
if dp_code := get_dpcode(self.device, description.alarm_msg):
@@ -149,8 +151,8 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
# When the alarm is triggered, only its 'state' is changing. From 'normal' to 'alarm'.
# The 'mode' doesn't change, and stays as 'arm' or 'home'.
if (
self._master_state is not None
and self.device.status.get(self._master_state.dpcode) == State.ALARM
self._state_dpcode_wrapper is not None
and self.device.status.get(self._state_dpcode_wrapper.dpcode) == State.ALARM
):
# Only report as triggered if NOT a battery warning
if (
@@ -166,28 +168,26 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
def changed_by(self) -> str | None:
"""Last change triggered by."""
if (
self._master_state is not None
self._state_dpcode_wrapper is not None
and self._alarm_msg_dpcode is not None
and self.device.status.get(self._master_state.dpcode) == State.ALARM
and self.device.status.get(self._state_dpcode_wrapper.dpcode) == State.ALARM
and (encoded_msg := self.device.status.get(self._alarm_msg_dpcode))
):
return b64decode(encoded_msg).decode("utf-16be")
return None

def alarm_disarm(self, code: str | None = None) -> None:
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send Disarm command."""
self._send_command(
[{"code": self.entity_description.key, "value": Mode.DISARMED}]
)
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.DISARMED)

def alarm_arm_home(self, code: str | None = None) -> None:
async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send Home command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.HOME}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.HOME)

def alarm_arm_away(self, code: str | None = None) -> None:
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send Arm command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.ARM}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.ARM)

def alarm_trigger(self, code: str | None = None) -> None:
async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send SOS command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.SOS}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.SOS)
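In the alarm hunk above, supported features are built by OR-ing flag members for each mode found in the datapoint's enum range. The accumulation in isolation, with a stand-in IntFlag (not the real AlarmControlPanelEntityFeature values):

from enum import IntFlag

class Feature(IntFlag):  # stand-in for AlarmControlPanelEntityFeature
    ARM_HOME = 1
    ARM_AWAY = 2
    TRIGGER = 4

MODE_TO_FEATURE = {"home": Feature.ARM_HOME, "arm": Feature.ARM_AWAY, "sos": Feature.TRIGGER}

def supported_features(enum_range: list[str]) -> Feature:
    """Accumulate feature flags for every mode the device advertises."""
    features = Feature(0)
    for mode, feature in MODE_TO_FEATURE.items():
        if mode in enum_range:
            features |= feature
    return features

print(supported_features(["arm", "home", "disarmed"]))  # Feature.ARM_AWAY|ARM_HOME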
@@ -15,11 +15,11 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import DPCodeBitmapBitWrapper, DPCodeBooleanWrapper, DPCodeWrapper


@dataclass(frozen=True)
@@ -366,20 +366,48 @@ BINARY_SENSORS: dict[DeviceCategory, tuple[TuyaBinarySensorEntityDescription, ..
}


def _get_bitmap_bit_mask(
device: CustomerDevice, dpcode: str, bitmap_key: str | None
) -> int | None:
"""Get the bit mask for a given bitmap description."""
if (
bitmap_key is None
or (status_range := device.status_range.get(dpcode)) is None
or status_range.type != DPType.BITMAP
or not isinstance(bitmap_values := json_loads(status_range.values), dict)
or not isinstance(bitmap_labels := bitmap_values.get("label"), list)
or bitmap_key not in bitmap_labels
):
class _CustomDPCodeWrapper(DPCodeWrapper):
"""Custom DPCode Wrapper to check for values in a set."""

_valid_values: set[bool | float | int | str]

def __init__(
self, dpcode: str, valid_values: set[bool | float | int | str]
) -> None:
"""Init CustomDPCodeBooleanWrapper."""
super().__init__(dpcode)
self._valid_values = valid_values

def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) is None:
return None
return raw_value in self._valid_values


def _get_dpcode_wrapper(
device: CustomerDevice,
description: TuyaBinarySensorEntityDescription,
) -> DPCodeWrapper | None:
"""Get DPCode wrapper for an entity description."""
dpcode = description.dpcode or description.key
if description.bitmap_key is not None:
return DPCodeBitmapBitWrapper.find_dpcode(
device, dpcode, bitmap_key=description.bitmap_key
)

if bool_type := DPCodeBooleanWrapper.find_dpcode(device, dpcode):
return bool_type

# Legacy / compatibility
if dpcode not in device.status:
return None
return bitmap_labels.index(bitmap_key)
return _CustomDPCodeWrapper(
dpcode,
description.on_value
if isinstance(description.on_value, set)
else {description.on_value},
)


async def async_setup_entry(
@@ -397,25 +425,11 @@ async def async_setup_entry(
for device_id in device_ids:
device = manager.device_map[device_id]
if descriptions := BINARY_SENSORS.get(device.category):
for description in descriptions:
dpcode = description.dpcode or description.key
if dpcode in device.status:
mask = _get_bitmap_bit_mask(
device, dpcode, description.bitmap_key
)

if (
description.bitmap_key is None  # Regular binary sensor
or mask is not None  # Bitmap sensor with valid mask
):
entities.append(
TuyaBinarySensorEntity(
device,
manager,
description,
mask,
)
)
entities.extend(
TuyaBinarySensorEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if (dpcode_wrapper := _get_dpcode_wrapper(device, description))
)

async_add_entities(entities)

@@ -436,26 +450,15 @@ class TuyaBinarySensorEntity(TuyaEntity, BinarySensorEntity):
device: CustomerDevice,
device_manager: Manager,
description: TuyaBinarySensorEntityDescription,
bit_mask: int | None = None,
dpcode_wrapper: DPCodeWrapper,
) -> None:
"""Init Tuya binary sensor."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._bit_mask = bit_mask
self._dpcode_wrapper = dpcode_wrapper

@property
def is_on(self) -> bool:
def is_on(self) -> bool | None:
"""Return true if sensor is on."""
dpcode = self.entity_description.dpcode or self.entity_description.key
if dpcode not in self.device.status:
return False

if self._bit_mask is not None:
# For bitmap sensors, check the specific bit mask
return (self.device.status[dpcode] & (1 << self._bit_mask)) != 0

if isinstance(self.entity_description.on_value, set):
return self.device.status[dpcode] in self.entity_description.on_value

return self.device.status[dpcode] == self.entity_description.on_value
return self._dpcode_wrapper.read_device_status(self.device)
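_CustomDPCodeWrapper above reduces the old on_value comparison to a set-membership test against the raw datapoint value, and returns None when the datapoint is absent. A standalone sketch of that idea, with a plain dict standing in for the device status:

class ValueSetFlag:
    """Report True when a status value is in a configured set, None when missing."""

    def __init__(self, dpcode: str, valid_values: set[bool | float | int | str]) -> None:
        self.dpcode = dpcode
        self._valid_values = valid_values

    def read(self, status: dict[str, object]) -> bool | None:
        if (raw := status.get(self.dpcode)) is None:
            return None
        return raw in self._valid_values

flag = ValueSetFlag("doorcontact_state", {"open", True})
print(flag.read({"doorcontact_state": "open"}))    # True
print(flag.read({"doorcontact_state": "closed"}))  # False
print(flag.read({}))                               # None (datapoint not reported)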
@@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import DPCodeBooleanWrapper

BUTTONS: dict[DeviceCategory, tuple[ButtonEntityDescription, ...]] = {
DeviceCategory.HXD: (
@@ -21,6 +22,19 @@ BUTTONS: dict[DeviceCategory, tuple[ButtonEntityDescription, ...]] = {
translation_key="snooze",
),
),
DeviceCategory.MSP: (
ButtonEntityDescription(
key=DPCode.FACTORY_RESET,
translation_key="factory_reset",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
ButtonEntityDescription(
key=DPCode.MANUAL_CLEAN,
translation_key="manual_clean",
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.SD: (
ButtonEntityDescription(
key=DPCode.RESET_DUSTER_CLOTH,
@@ -67,9 +81,13 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := BUTTONS.get(device.category):
entities.extend(
TuyaButtonEntity(device, manager, description)
TuyaButtonEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if description.key in device.status
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)

async_add_entities(entities)
@@ -89,12 +107,14 @@ class TuyaButtonEntity(TuyaEntity, ButtonEntity):
device: CustomerDevice,
device_manager: Manager,
description: ButtonEntityDescription,
dpcode_wrapper: DPCodeBooleanWrapper,
) -> None:
"""Init Tuya button."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._dpcode_wrapper = dpcode_wrapper

def press(self) -> None:
async def async_press(self) -> None:
"""Press the button."""
self._send_command([{"code": self.entity_description.key, "value": True}])
await self._async_send_dpcode_update(self._dpcode_wrapper, True)
@@ -704,6 +704,7 @@ class DPCode(StrEnum):
DECIBEL_SWITCH = "decibel_switch"
DEHUMIDITY_SET_ENUM = "dehumidify_set_enum"
DEHUMIDITY_SET_VALUE = "dehumidify_set_value"
DELAY_CLEAN_TIME = "delay_clean_time"
DELAY_SET = "delay_set"
DEW_POINT_TEMP = "dew_point_temp"
DISINFECTION = "disinfection"
@@ -717,6 +718,7 @@ class DPCode(StrEnum):
ELECTRICITY_LEFT = "electricity_left"
EXCRETION_TIME_DAY = "excretion_time_day"
EXCRETION_TIMES_DAY = "excretion_times_day"
FACTORY_RESET = "factory_reset"
FAN_BEEP = "fan_beep"  # Sound
FAN_COOL = "fan_cool"  # Cool wind
FAN_DIRECTION = "fan_direction"  # Fan direction
@@ -773,6 +775,7 @@ class DPCode(StrEnum):
LIQUID_STATE = "liquid_state"
LOCK = "lock"  # Lock / Child lock
MACH_OPERATE = "mach_operate"
MANUAL_CLEAN = "manual_clean"
MANUAL_FEED = "manual_feed"
MASTER_MODE = "master_mode"  # alarm mode
MASTER_STATE = "master_state"  # alarm state
@@ -240,6 +240,13 @@ LIGHTS: dict[DeviceCategory, tuple[TuyaLightEntityDescription, ...]] = {
color_data=DPCode.COLOUR_DATA,
),
),
DeviceCategory.MSP: (
TuyaLightEntityDescription(
key=DPCode.LIGHT,
translation_key="light",
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.QJDCZ: (
TuyaLightEntityDescription(
key=DPCode.SWITCH_LED,
@@ -22,17 +22,18 @@ class TypeInformation:
As provided by the SDK, from `device.function` / `device.status_range`.
"""

dpcode: DPCode

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
"""Load JSON string and return a TypeInformation object."""
raise NotImplementedError("from_json is not implemented for this type")
return cls(dpcode)


@dataclass
class IntegerTypeData(TypeInformation):
"""Integer Type Data."""

dpcode: DPCode
min: int
max: int
scale: float
@@ -100,11 +101,24 @@ class IntegerTypeData(TypeInformation):
)


@dataclass
class BitmapTypeInformation(TypeInformation):
"""Bitmap type information."""

label: list[str]

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
"""Load JSON string and return a BitmapTypeInformation object."""
if not (parsed := json.loads(data)):
return None
return cls(dpcode, **parsed)


@dataclass
class EnumTypeData(TypeInformation):
"""Enum Type Data."""

dpcode: DPCode
range: list[str]

@classmethod
@@ -116,6 +130,8 @@ class EnumTypeData(TypeInformation):


_TYPE_INFORMATION_MAPPINGS: dict[DPType, type[TypeInformation]] = {
DPType.BITMAP: BitmapTypeInformation,
DPType.BOOLEAN: TypeInformation,
DPType.ENUM: EnumTypeData,
DPType.INTEGER: IntegerTypeData,
}
@@ -146,13 +162,13 @@ class DPCodeWrapper(ABC):
The raw device status is converted to a Home Assistant value.
"""

@abstractmethod
def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
"""Convert a Home Assistant value back to a raw device value.

This is called by `get_update_command` to prepare the value for sending
back to the device, and should be implemented in concrete classes.
back to the device, and should be implemented in concrete classes if needed.
"""
raise NotImplementedError

def get_update_command(self, device: CustomerDevice, value: Any) -> dict[str, Any]:
"""Get the update command for the dpcode.
@@ -165,29 +181,6 @@ class DPCodeWrapper(ABC):
}


class DPCodeBooleanWrapper(DPCodeWrapper):
"""Simple wrapper for boolean values.

Supports True/False only.
"""

def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) in (True, False):
return raw_value
return None

def _convert_value_to_raw_value(
self, device: CustomerDevice, value: Any
) -> Any | None:
"""Convert a Home Assistant value back to a raw device value."""
if value in (True, False):
return value
# Currently only called with boolean values
# Safety net in case of future changes
raise ValueError(f"Invalid boolean value `{value}`")


class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
"""Base DPCode wrapper with Type Information."""

@@ -203,7 +196,7 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
def find_dpcode(
cls,
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...],
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
) -> Self | None:
@@ -217,6 +210,31 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
return None


class DPCodeBooleanWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
"""Simple wrapper for boolean values.

Supports True/False only.
"""

DPTYPE = DPType.BOOLEAN

def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) in (True, False):
return raw_value
return None

def _convert_value_to_raw_value(
self, device: CustomerDevice, value: Any
) -> Any | None:
"""Convert a Home Assistant value back to a raw device value."""
if value in (True, False):
return value
# Currently only called with boolean values
# Safety net in case of future changes
raise ValueError(f"Invalid boolean value `{value}`")


class DPCodeEnumWrapper(DPCodeTypeInformationWrapper[EnumTypeData]):
"""Simple wrapper for EnumTypeData values."""

@@ -272,6 +290,48 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeData]):
)


class DPCodeBitmapBitWrapper(DPCodeWrapper):
"""Simple wrapper for a specific bit in bitmap values."""

def __init__(self, dpcode: str, mask: int) -> None:
"""Init DPCodeBitmapWrapper."""
super().__init__(dpcode)
self._mask = mask

def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) is None:
return None
return (raw_value & (1 << self._mask)) != 0

@classmethod
def find_dpcode(
cls,
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...],
*,
bitmap_key: str,
) -> Self | None:
"""Find and return a DPCodeBitmapBitWrapper for the given DP codes."""
if (
type_information := find_dpcode(device, dpcodes, dptype=DPType.BITMAP)
) and bitmap_key in type_information.label:
return cls(
type_information.dpcode, type_information.label.index(bitmap_key)
)
return None


@overload
def find_dpcode(
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.BITMAP],
) -> BitmapTypeInformation | None: ...


@overload
def find_dpcode(
device: CustomerDevice,
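DPCodeBitmapBitWrapper above stores the index of the requested label in the bitmap's label list and then tests that bit in the raw integer. The arithmetic in isolation, with hypothetical labels:

def bit_from_bitmap(raw_value: int, labels: list[str], key: str) -> bool | None:
    """Return the state of the bit whose position matches `key` in `labels`."""
    if key not in labels:
        return None
    mask = labels.index(key)  # bit position, not a pre-shifted mask
    return (raw_value & (1 << mask)) != 0

labels = ["tilt", "drop", "overcharge"]  # hypothetical bitmap labels
print(bit_from_bitmap(0b010, labels, "drop"))        # True
print(bit_from_bitmap(0b010, labels, "overcharge"))  # False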
@@ -180,6 +180,14 @@ NUMBERS: dict[DeviceCategory, tuple[NumberEntityDescription, ...]] = {
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.MSP: (
NumberEntityDescription(
key=DPCode.DELAY_CLEAN_TIME,
translation_key="delay_clean_time",
device_class=NumberDeviceClass.DURATION,
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.MZJ: (
NumberEntityDescription(
key=DPCode.COOK_TEMPERATURE,
@@ -19,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import DPCodeBooleanWrapper

SIRENS: dict[DeviceCategory, tuple[SirenEntityDescription, ...]] = {
DeviceCategory.CO2BJ: (
@@ -64,9 +65,13 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := SIRENS.get(device.category):
entities.extend(
TuyaSirenEntity(device, manager, description)
TuyaSirenEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if description.key in device.status
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)

async_add_entities(entities)
@@ -89,21 +94,23 @@ class TuyaSirenEntity(TuyaEntity, SirenEntity):
device: CustomerDevice,
device_manager: Manager,
description: SirenEntityDescription,
dpcode_wrapper: DPCodeBooleanWrapper,
) -> None:
"""Init Tuya Siren."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._dpcode_wrapper = dpcode_wrapper

@property
def is_on(self) -> bool:
def is_on(self) -> bool | None:
"""Return true if siren is on."""
return self.device.status.get(self.entity_description.key, False)
return self._dpcode_wrapper.read_device_status(self.device)

def turn_on(self, **kwargs: Any) -> None:
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the siren on."""
self._send_command([{"code": self.entity_description.key, "value": True}])
await self._async_send_dpcode_update(self._dpcode_wrapper, True)

def turn_off(self, **kwargs: Any) -> None:
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the siren off."""
self._send_command([{"code": self.entity_description.key, "value": False}])
await self._async_send_dpcode_update(self._dpcode_wrapper, False)
@@ -77,6 +77,12 @@
}
},
"button": {
"factory_reset": {
"name": "Factory reset"
},
"manual_clean": {
"name": "Manual clean"
},
"reset_duster_cloth": {
"name": "Reset duster cloth"
},
@@ -166,6 +172,9 @@
"cook_time": {
"name": "Cooking time"
},
"delay_clean_time": {
"name": "Delay clean time"
},
"down_delay": {
"name": "Down delay"
},
@@ -946,14 +946,13 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := SWITCHES.get(device.category):
entities.extend(
TuyaSwitchEntity(
device,
manager,
description,
DPCodeBooleanWrapper(description.key),
)
TuyaSwitchEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if description.key in device.status
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
and _check_deprecation(
hass,
device,
@@ -94,14 +94,13 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := VALVES.get(device.category):
entities.extend(
TuyaValveEntity(
device,
manager,
description,
DPCodeBooleanWrapper(description.key),
)
TuyaValveEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if description.key in device.status
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)

async_add_entities(entities)
@@ -2,13 +2,23 @@

from __future__ import annotations

import logging
from typing import TYPE_CHECKING

import aiohttp
from uasiren.client import Client

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, CONF_REGION
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN, PLATFORMS
from .coordinator import UkraineAlarmDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Ukraine Alarm as config entry."""
@@ -30,3 +40,56 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)

if config_entry.version == 1:
# Version 1 had states as first-class selections
# Version 2 only allows states w/o districts, districts and communities
region_id = config_entry.data[CONF_REGION]

websession = async_get_clientsession(hass)
try:
regions_data = await Client(websession).get_regions()
except (aiohttp.ClientError, TimeoutError) as err:
_LOGGER.warning(
"Could not migrate config entry %s: failed to fetch current regions: %s",
config_entry.entry_id,
err,
)
return False

if TYPE_CHECKING:
assert isinstance(regions_data, dict)

state_with_districts = None
for state in regions_data["states"]:
if state["regionId"] == region_id and state.get("regionChildIds"):
state_with_districts = state
break

if state_with_districts:
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_state_region_{config_entry.entry_id}",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_state_region",
translation_placeholders={
"region_name": config_entry.data.get(CONF_NAME, region_id),
},
)

return False

hass.config_entries.async_update_entry(config_entry, version=2)
_LOGGER.info("Migration to version %s successful", 2)
return True

_LOGGER.error("Unknown version %s", config_entry.version)
return False
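The migration above only bumps the entry to version 2 when the stored region is safe to keep; a state that still has districts raises a repair issue and the migration deliberately returns False so the entry stays on version 1. A reduced sketch of that gate; the data shape follows the regions_data structure used above, everything else is illustrative:

def can_migrate(region_id: str, regions_data: dict) -> bool:
    """Return True when the configured region is not a state with districts."""
    state_with_districts = next(
        (
            state
            for state in regions_data["states"]
            if state["regionId"] == region_id and state.get("regionChildIds")
        ),
        None,
    )
    return state_with_districts is None

regions = {"states": [{"regionId": "12", "regionChildIds": ["121", "122"]}]}
print(can_migrate("12", regions))  # False: state-level region, needs reconfiguration
print(can_migrate("99", regions))  # True: safe to bump the entry version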
@@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__)
class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
"""Config flow for Ukraine Alarm."""

VERSION = 1
VERSION = 2

def __init__(self) -> None:
"""Initialize a new UkraineAlarmConfigFlow."""
@@ -112,7 +112,7 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
return await self._async_finish_flow()

regions = {}
if self.selected_region:
if self.selected_region and step_id != "district":
regions[self.selected_region["regionId"]] = self.selected_region[
"regionName"
]

@@ -13,19 +13,19 @@
"data": {
"region": "[%key:component::ukraine_alarm::config::step::user::data::region%]"
},
"description": "If you want to monitor not only state and district, choose its specific community"
"description": "Choose the district you selected above or select a specific community within that district"
},
"district": {
"data": {
"region": "[%key:component::ukraine_alarm::config::step::user::data::region%]"
},
"description": "If you want to monitor not only state, choose its specific district"
"description": "Choose a district to monitor within the selected state"
},
"user": {
"data": {
"region": "Region"
},
"description": "Choose state to monitor"
"description": "Choose a state"
}
}
},
@@ -50,5 +50,11 @@
"name": "Urban fights"
}
}
},
"issues": {
"deprecated_state_region": {
"description": "The region `{region_name}` is a state-level region, which is no longer supported. Please remove this integration entry and add it again, selecting a district or community instead of the entire state.",
"title": "State-level region monitoring is no longer supported"
}
}
}
@@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2025.8.0"],
"requirements": ["velbus-aio==2025.11.0"],
"usb": [
{
"pid": "0B1B",
@@ -1,17 +1,20 @@
"""Support for VELUX KLF 200 devices."""

from __future__ import annotations

from pyvlx import PyVLX, PyVLXException

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import device_registry as dr, issue_registry as ir

from .const import DOMAIN, LOGGER, PLATFORMS

type VeluxConfigEntry = ConfigEntry[PyVLX]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> bool:
"""Set up the velux component."""
host = entry.data[CONF_HOST]
password = entry.data[CONF_PASSWORD]
@@ -27,12 +30,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

entry.runtime_data = pyvlx

device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, f"gateway_{entry.entry_id}")},
name="KLF 200 Gateway",
manufacturer="Velux",
model="KLF 200",
hw_version=(
str(pyvlx.klf200.version.hardwareversion) if pyvlx.klf200.version else None
),
sw_version=(
str(pyvlx.klf200.version.softwareversion) if pyvlx.klf200.version else None
),
)

async def on_hass_stop(event):
"""Close connection when hass stops."""
LOGGER.debug("Velux interface terminated")
await pyvlx.disconnect()

async def async_reboot_gateway(service_call: ServiceCall) -> None:
"""Reboot the gateway (deprecated - use button entity instead)."""
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_reboot_service",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_reboot_service",
breaks_in_ha_version="2026.6.0",
)

await pyvlx.reboot_gateway()

entry.async_on_unload(
@@ -46,6 +76,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -24,14 +24,14 @@ SCAN_INTERVAL = timedelta(minutes=5)  # Use standard polling

async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up rain sensor(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data

async_add_entities(
VeluxRainSensor(node, config.entry_id)
VeluxRainSensor(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, Window) and node.rain_sensor
)
homeassistant/components/velux/button.py (new file, 52 lines)
@@ -0,0 +1,52 @@
"""Support for VELUX KLF 200 gateway button."""

from __future__ import annotations

from pyvlx import PyVLX, PyVLXException

from homeassistant.components.button import ButtonDeviceClass, ButtonEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import VeluxConfigEntry
from .const import DOMAIN


async def async_setup_entry(
hass: HomeAssistant,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up button entities for the Velux integration."""
async_add_entities(
[VeluxGatewayRebootButton(config_entry.entry_id, config_entry.runtime_data)]
)


class VeluxGatewayRebootButton(ButtonEntity):
"""Representation of the Velux Gateway reboot button."""

_attr_has_entity_name = True
_attr_device_class = ButtonDeviceClass.RESTART
_attr_entity_category = EntityCategory.CONFIG

def __init__(self, config_entry_id: str, pyvlx: PyVLX) -> None:
"""Initialize the gateway reboot button."""
self.pyvlx = pyvlx
self._attr_unique_id = f"{config_entry_id}_reboot-gateway"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"gateway_{config_entry_id}")},
)

async def async_press(self) -> None:
"""Handle the button press - reboot the gateway."""
try:
await self.pyvlx.reboot_gateway()
except PyVLXException as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="reboot_failed",
) from ex
@@ -85,7 +85,7 @@ class VeluxConfigFlow(ConfigFlow, domain=DOMAIN):
updates={CONF_HOST: self.discovery_data[CONF_HOST]}
)

# Abort if config_entry already exists without unigue_id configured.
# Abort if config_entry already exists without unique_id configured.
for entry in self.hass.config_entries.async_entries(DOMAIN):
if (
entry.data[CONF_HOST] == self.discovery_data[CONF_HOST]
@@ -5,5 +5,11 @@ from logging import getLogger
from homeassistant.const import Platform

DOMAIN = "velux"
PLATFORMS = [Platform.BINARY_SENSOR, Platform.COVER, Platform.LIGHT, Platform.SCENE]
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.COVER,
Platform.LIGHT,
Platform.SCENE,
]
LOGGER = getLogger(__package__)
@@ -32,13 +32,13 @@ PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up cover(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
async_add_entities(
VeluxCover(node, config.entry_id)
VeluxCover(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, OpeningDevice)
)

@@ -18,22 +18,23 @@ class VeluxEntity(Entity):
def __init__(self, node: Node, config_entry_id: str) -> None:
"""Initialize the Velux device."""
self.node = node
self._attr_unique_id = (
unique_id = (
node.serial_number
if node.serial_number
else f"{config_entry_id}_{node.node_id}"
)
self._attr_unique_id = unique_id

self._attr_device_info = DeviceInfo(
identifiers={
(
DOMAIN,
node.serial_number
if node.serial_number
else f"{config_entry_id}_{node.node_id}",
unique_id,
)
},
name=node.name if node.name else f"#{node.node_id}",
serial_number=node.serial_number,
via_device=(DOMAIN, f"gateway_{config_entry_id}"),
)

@callback

@@ -18,13 +18,13 @@ PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up light(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
async_add_entities(
VeluxLight(node, config.entry_id)
VeluxLight(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, LighteningDevice)
)
@@ -35,6 +35,7 @@ class VeluxLight(VeluxEntity, LightEntity):

_attr_supported_color_modes = {ColorMode.BRIGHTNESS}
_attr_color_mode = ColorMode.BRIGHTNESS
_attr_name = None

node: LighteningDevice
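The VeluxEntity hunk above computes the unique ID once, preferring the node's hardware serial and falling back to an entry-scoped synthetic ID, and reuses it for the device identifier. A tiny standalone sketch of that fallback, with made-up serial and entry values:

def node_unique_id(serial_number: str | None, config_entry_id: str, node_id: int) -> str:
    """Prefer the hardware serial; fall back to an entry-scoped synthetic ID."""
    return serial_number if serial_number else f"{config_entry_id}_{node_id}"

print(node_unique_id("53:01:02:03", "entry1", 3))  # serial wins
print(node_unique_id(None, "entry1", 3))           # "entry1_3"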
homeassistant/components/velux/quality_scale.yaml (new file, 76 lines)
@@ -0,0 +1,76 @@
rules:
# Bronze
action-setup:
status: todo
comment: needs to move to async_setup
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency:
status: todo
comment: release-builds need CI
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: todo
comment: subscribe is ok, unsubscribe needs to be added
entity-unique-id: done
has-entity-name:
status: todo
comment: scenes need fixing
runtime-data: done
test-before-configure: done
test-before-setup:
status: todo
comment: needs rework, failure to setup currently only returns false
unique-config-entry: done

# Silver
action-exceptions: todo
config-entry-unloading: todo
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates:
status: todo
comment: button still needs it
reauthentication-flow: todo
test-coverage:
status: todo
comment: cleanup mock_config_entry vs mock_user_config_entry, cleanup mock_pyvlx vs mock_velux_client, remove unused freezer in test_cover_closed, add tests where missing

# Gold
devices:
status: todo
comment: scenes need devices
diagnostics: todo
discovery-update-info: todo
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo

# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo
@@ -15,11 +15,11 @@ PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the scenes for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data

entities = [VeluxScene(scene) for scene in pyvlx.scenes]
async_add_entities(entities)
@@ -36,9 +36,20 @@
}
}
},
"exceptions": {
"reboot_failed": {
"message": "Failed to reboot gateway. Try again in a few moments or power cycle the device manually"
}
},
"issues": {
"deprecated_reboot_service": {
"description": "The `velux.reboot_gateway` service is deprecated and will be removed in Home Assistant 2026.6.0. Please use the 'Restart' button entity instead. You can find this button in the device page for your KLF 200 Gateway or by searching for 'restart' in your entity list.",
"title": "Velux reboot service is deprecated"
}
},
"services": {
"reboot_gateway": {
"description": "Reboots the KLF200 Gateway.",
"description": "Reboots the KLF200 Gateway",
"name": "Reboot gateway"
}
}
@@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_COORDINATOR, VS_MANAGER
@@ -121,3 +122,21 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
hass.config_entries.async_update_entry(config_entry, minor_version=2)

return True


async def async_remove_config_entry_device(
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry
) -> bool:
"""Remove a config entry from a device."""
manager = hass.data[DOMAIN][VS_MANAGER]
await manager.get_devices()
for dev in manager.devices:
if isinstance(dev.sub_device_no, int):
device_id = f"{dev.cid}{dev.sub_device_no!s}"
else:
device_id = dev.cid
identifier = next(iter(device_entry.identifiers), None)
if identifier and device_id == identifier[1]:
return False

return True
Some files were not shown because too many files have changed in this diff.