Mirror of https://github.com/home-assistant/core.git (synced 2025-07-21 12:17:07 +00:00)

Commit 14c8a01f96
Merge branch 'dev' of https://github.com/redgtech-automacao/core into dev
.github/assets/screenshot-integrations.png (binary, vendored)
Binary file not shown. Size: 65 KiB before, 99 KiB after.
Dockerfile (generated; 2 lines changed)
@@ -13,7 +13,7 @@ ENV \
 ARG QEMU_CPU
 
 # Install uv
-RUN pip3 install uv==0.5.27
+RUN pip3 install uv==0.6.0
 
 WORKDIR /usr/src
 
@@ -134,14 +134,12 @@ DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
 LOG_SLOW_STARTUP_INTERVAL = 60
 SLOW_STARTUP_CHECK_INTERVAL = 1
 
+STAGE_0_SUBSTAGE_TIMEOUT = 60
 STAGE_1_TIMEOUT = 120
 STAGE_2_TIMEOUT = 300
 WRAP_UP_TIMEOUT = 300
 COOLDOWN_TIME = 60
 
 
-DEBUGGER_INTEGRATIONS = {"debugpy"}
-
-
 # Core integrations are unconditionally loaded
 CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
 
@@ -152,6 +150,10 @@ LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
     "isal",
     # Set log levels
     "logger",
+    # Ensure network config is available
+    # before hassio or any other integration is
+    # loaded that might create an aiohttp client session
+    "network",
     # Error logging
     "system_log",
     "sentry",
@@ -172,12 +174,27 @@ FRONTEND_INTEGRATIONS = {
     # add it here.
     "backup",
 }
-RECORDER_INTEGRATIONS = {
-    # Setup after frontend
-    # To record data
-    "recorder",
-}
-DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
+# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
+# The substage containing recorder should have no timeout, as it could cancel a database migration.
+# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
+# The substages preceding it should also have no timeout, until we ensure that the recorder
+# is not accidentally promoted as a dependency of any of the integrations in them.
+# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
+STAGE_0_INTEGRATIONS = (
+    # Load logging and http deps as soon as possible
+    ("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
+    # Setup frontend
+    ("frontend", FRONTEND_INTEGRATIONS, None),
+    # Setup recorder
+    ("recorder", {"recorder"}, None),
+    # Start up debuggers. Start these first in case they want to wait.
+    ("debugger", {"debugpy"}, STAGE_0_SUBSTAGE_TIMEOUT),
+    # Zeroconf is used for mdns resolution in aiohttp client helper.
+    ("zeroconf", {"zeroconf"}, STAGE_0_SUBSTAGE_TIMEOUT),
+)
+
+DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb")
+
+# Stage 1 integrations are not to be preimported in bootstrap.
 STAGE_1_INTEGRATIONS = {
     # We need to make sure discovery integrations
     # update their deps before stage 2 integrations
@@ -189,9 +206,8 @@ STAGE_1_INTEGRATIONS = {
     "mqtt_eventstream",
     # To provide account link implementations
     "cloud",
-    # Ensure supervisor is available
-    "hassio",
 }
 
 DEFAULT_INTEGRATIONS = {
     # These integrations are set up unless recovery mode is activated.
     #
@@ -232,22 +248,12 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
     # These integrations are set up if using the Supervisor
     "hassio",
 }
 
 CRITICAL_INTEGRATIONS = {
     # Recovery mode is activated if these integrations fail to set up
     "frontend",
 }
 
-SETUP_ORDER = (
-    # Load logging and http deps as soon as possible
-    ("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
-    # Setup frontend
-    ("frontend", FRONTEND_INTEGRATIONS),
-    # Setup recorder
-    ("recorder", RECORDER_INTEGRATIONS),
-    # Start up debuggers. Start these first in case they want to wait.
-    ("debugger", DEBUGGER_INTEGRATIONS),
-)
-
 #
 # Storage keys we are likely to load during startup
 # in order of when we expect to load them.
@@ -694,7 +700,6 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
     return deps_dir
 
 
-@core.callback
 def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
     """Get domains of components to set up."""
     # Filter out the repeating and common config section [homeassistant]
@@ -890,69 +895,48 @@ async def _async_set_up_integrations(
     domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
         hass, config
     )
+    stage_2_domains = domains_to_setup.copy()
 
     # Initialize recorder
     if "recorder" in domains_to_setup:
         recorder.async_initialize_recorder(hass)
 
-    pre_stage_domains = [
-        (name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
+    stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
+        *(
+            (name, domain_group & domains_to_setup, timeout)
+            for name, domain_group, timeout in STAGE_0_INTEGRATIONS
+        ),
+        ("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
     ]
 
-    # calculate what components to setup in what stage
-    stage_1_domains: set[str] = set()
-
-    # Find all dependencies of any dependency of any stage 1 integration that
-    # we plan on loading and promote them to stage 1. This is done only to not
-    # get misleading log messages
-    deps_promotion: set[str] = STAGE_1_INTEGRATIONS
-    while deps_promotion:
-        old_deps_promotion = deps_promotion
-        deps_promotion = set()
-
-        for domain in old_deps_promotion:
-            if domain not in domains_to_setup or domain in stage_1_domains:
-                continue
-
-            stage_1_domains.add(domain)
-
-            if (dep_itg := integration_cache.get(domain)) is None:
-                continue
-
-            deps_promotion.update(dep_itg.all_dependencies)
-
-    stage_2_domains = domains_to_setup - stage_1_domains
-
-    for name, domain_group in pre_stage_domains:
-        if domain_group:
-            stage_2_domains -= domain_group
-            _LOGGER.info("Setting up %s: %s", name, domain_group)
-            to_be_loaded = domain_group.copy()
-            to_be_loaded.update(
-                dep
-                for domain in domain_group
-                if (integration := integration_cache.get(domain)) is not None
-                for dep in integration.all_dependencies
-            )
-            async_set_domains_to_be_loaded(hass, to_be_loaded)
+    _LOGGER.info("Setting up stage 0 and 1")
+    for name, domain_group, timeout in stage_0_and_1_domains:
+        if not domain_group:
+            continue
+
+        _LOGGER.info("Setting up %s: %s", name, domain_group)
+        to_be_loaded = domain_group.copy()
+        to_be_loaded.update(
+            dep
+            for domain in domain_group
+            if (integration := integration_cache.get(domain)) is not None
+            for dep in integration.all_dependencies
+        )
+        async_set_domains_to_be_loaded(hass, to_be_loaded)
+        stage_2_domains -= to_be_loaded
+
+        if timeout is None:
             await _async_setup_multi_components(hass, domain_group, config)
-
-    # Enables after dependencies when setting up stage 1 domains
-    async_set_domains_to_be_loaded(hass, stage_1_domains)
-
-    # Start setup
-    if stage_1_domains:
-        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
-        try:
-            async with hass.timeout.async_timeout(
-                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
-            ):
-                await _async_setup_multi_components(hass, stage_1_domains, config)
-        except TimeoutError:
-            _LOGGER.warning(
-                "Setup timed out for stage 1 waiting on %s - moving forward",
-                hass._active_tasks,  # noqa: SLF001
-            )
+        else:
+            try:
+                async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
+                    await _async_setup_multi_components(hass, domain_group, config)
+            except TimeoutError:
+                _LOGGER.warning(
+                    "Setup timed out for %s waiting on %s - moving forward",
+                    name,
+                    hass._active_tasks,  # noqa: SLF001
+                )
 
     # Add after dependencies when setting up stage 2 domains
     async_set_domains_to_be_loaded(hass, stage_2_domains)
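Note (illustrative sketch, not part of the commit): each substage tuple above is intersected with the resolved domains before setup, so only integrations that are actually configured get attempted. A minimal Python sketch with hypothetical values:

    # Hypothetical values; mirrors the `domain_group & domains_to_setup` filtering in the new loop.
    domains_to_setup = {"frontend", "recorder", "zeroconf", "mqtt"}
    name, domain_group, timeout = ("zeroconf", {"zeroconf"}, 60)
    print(domain_group & domains_to_setup)  # {'zeroconf'} -- only configured members of the substage are set up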
@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from adguardhome import AdGuardHome, AdGuardHomeConnectionError
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     CONF_HOST,
     CONF_NAME,
@@ -123,12 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
 async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
     """Unload AdGuard Home config entry."""
     unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
         # This is the last loaded instance of AdGuard, deregister any services
         hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
         hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/airgradient",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["airgradient==0.9.1"],
+  "requirements": ["airgradient==0.9.2"],
   "zeroconf": ["_airgradient._tcp.local."]
 }
@@ -90,7 +90,7 @@
     },
     "alarm_arm_home": {
       "name": "Arm home",
-      "description": "Sets the alarm to: _armed, but someone is home_.",
+      "description": "Arms the alarm in the home mode.",
       "fields": {
         "code": {
           "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -100,7 +100,7 @@
     },
     "alarm_arm_away": {
      "name": "Arm away",
-      "description": "Sets the alarm to: _armed, no one home_.",
+      "description": "Arms the alarm in the away mode.",
       "fields": {
         "code": {
           "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -110,7 +110,7 @@
     },
     "alarm_arm_night": {
       "name": "Arm night",
-      "description": "Sets the alarm to: _armed for the night_.",
+      "description": "Arms the alarm in the night mode.",
       "fields": {
         "code": {
           "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -120,7 +120,7 @@
     },
     "alarm_arm_vacation": {
       "name": "Arm vacation",
-      "description": "Sets the alarm to: _armed for vacation_.",
+      "description": "Arms the alarm in the vacation mode.",
       "fields": {
         "code": {
           "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -130,7 +130,7 @@
     },
     "alarm_trigger": {
       "name": "Trigger",
-      "description": "Trigger the alarm manually.",
+      "description": "Triggers the alarm manually.",
       "fields": {
         "code": {
           "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -1,16 +1,23 @@
 """Conversation support for Anthropic."""
 
-from collections.abc import Callable
+from collections.abc import AsyncGenerator, Callable
 import json
-from typing import Any, Literal, cast
+from typing import Any, Literal
 
 import anthropic
+from anthropic import AsyncStream
 from anthropic._types import NOT_GIVEN
 from anthropic.types import (
+    InputJSONDelta,
     Message,
     MessageParam,
+    MessageStreamEvent,
+    RawContentBlockDeltaEvent,
+    RawContentBlockStartEvent,
+    RawContentBlockStopEvent,
     TextBlock,
     TextBlockParam,
+    TextDelta,
     ToolParam,
     ToolResultBlockParam,
     ToolUseBlock,
@@ -109,7 +116,7 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
                     type="tool_use",
                     id=tool_call.id,
                     name=tool_call.tool_name,
-                    input=json.dumps(tool_call.tool_args),
+                    input=tool_call.tool_args,
                 )
                 for tool_call in chat_content.tool_calls or ()
             ],
@@ -124,6 +131,66 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
     raise ValueError(f"Unexpected content type: {type(chat_content)}")
 
 
+async def _transform_stream(
+    result: AsyncStream[MessageStreamEvent],
+) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
+    """Transform the response stream into HA format.
+
+    A typical stream of responses might look something like the following:
+    - RawMessageStartEvent with no content
+    - RawContentBlockStartEvent with an empty TextBlock
+    - RawContentBlockDeltaEvent with a TextDelta
+    - RawContentBlockDeltaEvent with a TextDelta
+    - RawContentBlockDeltaEvent with a TextDelta
+    - ...
+    - RawContentBlockStopEvent
+    - RawContentBlockStartEvent with ToolUseBlock specifying the function name
+    - RawContentBlockDeltaEvent with a InputJSONDelta
+    - RawContentBlockDeltaEvent with a InputJSONDelta
+    - ...
+    - RawContentBlockStopEvent
+    - RawMessageDeltaEvent with a stop_reason='tool_use'
+    - RawMessageStopEvent(type='message_stop')
+    """
+    if result is None:
+        raise TypeError("Expected a stream of messages")
+
+    current_tool_call: dict | None = None
+
+    async for response in result:
+        LOGGER.debug("Received response: %s", response)
+
+        if isinstance(response, RawContentBlockStartEvent):
+            if isinstance(response.content_block, ToolUseBlock):
+                current_tool_call = {
+                    "id": response.content_block.id,
+                    "name": response.content_block.name,
+                    "input": "",
+                }
+            elif isinstance(response.content_block, TextBlock):
+                yield {"role": "assistant"}
+        elif isinstance(response, RawContentBlockDeltaEvent):
+            if isinstance(response.delta, InputJSONDelta):
+                if current_tool_call is None:
+                    raise ValueError("Unexpected delta without a tool call")
+                current_tool_call["input"] += response.delta.partial_json
+            elif isinstance(response.delta, TextDelta):
+                LOGGER.debug("yielding delta: %s", response.delta.text)
+                yield {"content": response.delta.text}
+        elif isinstance(response, RawContentBlockStopEvent):
+            if current_tool_call:
+                yield {
+                    "tool_calls": [
+                        llm.ToolInput(
+                            id=current_tool_call["id"],
+                            tool_name=current_tool_call["name"],
+                            tool_args=json.loads(current_tool_call["input"]),
+                        )
+                    ]
+                }
+            current_tool_call = None
+
+
 class AnthropicConversationEntity(
     conversation.ConversationEntity, conversation.AbstractConversationAgent
 ):
@@ -206,58 +273,30 @@ class AnthropicConversationEntity(
         # To prevent infinite loops, we limit the number of iterations
         for _iteration in range(MAX_TOOL_ITERATIONS):
             try:
-                response = await client.messages.create(
+                stream = await client.messages.create(
                     model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
                     messages=messages,
                     tools=tools or NOT_GIVEN,
                     max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                     system=system.content,
                     temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
+                    stream=True,
                 )
             except anthropic.AnthropicError as err:
                 raise HomeAssistantError(
                     f"Sorry, I had a problem talking to Anthropic: {err}"
                 ) from err
 
-            LOGGER.debug("Response %s", response)
-
-            messages.append(_message_convert(response))
-
-            text = "".join(
+            messages.extend(
                 [
-                    content.text
-                    for content in response.content
-                    if isinstance(content, TextBlock)
+                    _convert_content(content)
+                    async for content in chat_log.async_add_delta_content_stream(
+                        user_input.agent_id, _transform_stream(stream)
+                    )
                 ]
             )
-            tool_inputs = [
-                llm.ToolInput(
-                    id=tool_call.id,
-                    tool_name=tool_call.name,
-                    tool_args=cast(dict[str, Any], tool_call.input),
-                )
-                for tool_call in response.content
-                if isinstance(tool_call, ToolUseBlock)
-            ]
 
-            tool_results = [
-                ToolResultBlockParam(
-                    type="tool_result",
-                    tool_use_id=tool_response.tool_call_id,
-                    content=json.dumps(tool_response.tool_result),
-                )
-                async for tool_response in chat_log.async_add_assistant_content(
-                    conversation.AssistantContent(
-                        agent_id=user_input.agent_id,
-                        content=text,
-                        tool_calls=tool_inputs or None,
-                    )
-                )
-            ]
-            if tool_results:
-                messages.append(MessageParam(role="user", content=tool_results))
-
-            if not tool_inputs:
+            if not chat_log.unresponded_tool_results:
                 break
 
         response_content = chat_log.content[-1]
@@ -19,10 +19,20 @@ class ApSystemsEntity(Entity):
         data: ApSystemsData,
     ) -> None:
         """Initialize the APsystems entity."""
+
+        # Handle device version safely
+        sw_version = None
+        if data.coordinator.device_version:
+            version_parts = data.coordinator.device_version.split(" ")
+            if len(version_parts) > 1:
+                sw_version = version_parts[1]
+            else:
+                sw_version = version_parts[0]
+
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, data.device_id)},
             manufacturer="APsystems",
             model="EZ1-M",
             serial_number=data.device_id,
-            sw_version=data.coordinator.device_version.split(" ")[1],
+            sw_version=sw_version,
         )
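Note (illustrative sketch, not part of the commit): the new guard avoids an IndexError when the coordinator reports a firmware string without a space-separated prefix. With hypothetical values:

    for device_version in ("EZ1 1.6.0", "1.6.0"):  # hypothetical firmware strings
        parts = device_version.split(" ")
        sw_version = parts[1] if len(parts) > 1 else parts[0]
        print(sw_version)  # "1.6.0" both times; the old `.split(" ")[1]` raised IndexError on the second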
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
   "iot_class": "local_polling",
   "loggers": ["arcam"],
-  "requirements": ["arcam-fmj==1.5.2"],
+  "requirements": ["arcam-fmj==1.8.0"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -19,6 +19,7 @@ from .const import (
     DOMAIN,
     AssistSatelliteEntityFeature,
 )
+from .entity import AssistSatelliteConfiguration
 
 CONNECTION_TEST_TIMEOUT = 30
 
@@ -91,7 +92,16 @@ def websocket_get_configuration(
         )
         return
 
-    config_dict = asdict(satellite.async_get_configuration())
+    try:
+        config_dict = asdict(satellite.async_get_configuration())
+    except NotImplementedError:
+        # Stub configuration
+        config_dict = asdict(
+            AssistSatelliteConfiguration(
+                available_wake_words=[], active_wake_words=[], max_active_wake_words=1
+            )
+        )
+
     config_dict["pipeline_entity_id"] = satellite.pipeline_entity_id
     config_dict["vad_entity_id"] = satellite.vad_sensitivity_entity_id
 
@@ -24,6 +24,8 @@ PLATFORMS = [
     Platform.FAN,
     Platform.LIGHT,
     Platform.SELECT,
+    Platform.SWITCH,
+    Platform.TIME,
 ]
 
 
@@ -14,5 +14,5 @@
   "documentation": "https://www.home-assistant.io/integrations/balboa",
   "iot_class": "local_push",
   "loggers": ["pybalboa"],
-  "requirements": ["pybalboa==1.1.2"]
+  "requirements": ["pybalboa==1.1.3"]
 }
@@ -78,6 +78,19 @@
           "high": "High"
         }
       }
+    },
+    "switch": {
+      "filter_cycle_2_enabled": {
+        "name": "Filter cycle 2 enabled"
+      }
+    },
+    "time": {
+      "filter_cycle_start": {
+        "name": "Filter cycle {index} start"
+      },
+      "filter_cycle_end": {
+        "name": "Filter cycle {index} end"
+      }
     }
   }
 }
homeassistant/components/balboa/switch.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+"""Support for Balboa switches."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from pybalboa import SpaClient
+
+from homeassistant.components.switch import SwitchEntity
+from homeassistant.const import EntityCategory
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import BalboaConfigEntry
+from .entity import BalboaEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: BalboaConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up the spa's switches."""
+    spa = entry.runtime_data
+    async_add_entities([BalboaSwitchEntity(spa)])
+
+
+class BalboaSwitchEntity(BalboaEntity, SwitchEntity):
+    """Representation of a Balboa switch entity."""
+
+    def __init__(self, spa: SpaClient) -> None:
+        """Initialize a Balboa switch entity."""
+        super().__init__(spa, "filter_cycle_2_enabled")
+        self._attr_entity_category = EntityCategory.CONFIG
+        self._attr_translation_key = "filter_cycle_2_enabled"
+
+    @property
+    def is_on(self) -> bool:
+        """Return True if entity is on."""
+        return self._client.filter_cycle_2_enabled
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the entity on."""
+        await self._client.configure_filter_cycle(2, enabled=True)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the entity off."""
+        await self._client.configure_filter_cycle(2, enabled=False)
homeassistant/components/balboa/time.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+"""Support for Balboa times."""
+
+from __future__ import annotations
+
+from datetime import time
+import itertools
+from typing import Any
+
+from pybalboa import SpaClient
+
+from homeassistant.components.time import TimeEntity
+from homeassistant.const import EntityCategory
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import BalboaConfigEntry
+from .entity import BalboaEntity
+
+FILTER_CYCLE = "filter_cycle_"
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: BalboaConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up the spa's times."""
+    spa = entry.runtime_data
+    async_add_entities(
+        BalboaTimeEntity(spa, index, period)
+        for index, period in itertools.product((1, 2), ("start", "end"))
+    )
+
+
+class BalboaTimeEntity(BalboaEntity, TimeEntity):
+    """Representation of a Balboa time entity."""
+
+    entity_category = EntityCategory.CONFIG
+
+    def __init__(self, spa: SpaClient, index: int, period: str) -> None:
+        """Initialize a Balboa time entity."""
+        super().__init__(spa, f"{FILTER_CYCLE}{index}_{period}")
+        self.index = index
+        self.period = period
+        self._attr_translation_key = f"{FILTER_CYCLE}{period}"
+        self._attr_translation_placeholders = {"index": str(index)}
+
+    @property
+    def native_value(self) -> time | None:
+        """Return the value reported by the time."""
+        return getattr(self._client, f"{FILTER_CYCLE}{self.index}_{self.period}")
+
+    async def async_set_value(self, value: time) -> None:
+        """Change the time."""
+        args: dict[str, Any] = {self.period: value}
+        await self._client.configure_filter_cycle(self.index, **args)
@@ -4,12 +4,13 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
+from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
 from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 
 from . import BangOlufsenConfigEntry
-from .const import DOMAIN
+from .const import DEVICE_BUTTONS, DOMAIN
 
 
 async def async_get_config_entry_diagnostics(
@@ -25,8 +26,9 @@ async def async_get_config_entry_diagnostics(
     if TYPE_CHECKING:
         assert config_entry.unique_id
 
-    # Add media_player entity's state
     entity_registry = er.async_get(hass)
 
+    # Add media_player entity's state
     if entity_id := entity_registry.async_get_entity_id(
         MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id
     ):
@@ -37,4 +39,16 @@ async def async_get_config_entry_diagnostics(
             state_dict.pop("context")
         data["media_player"] = state_dict
 
+    # Add button Event entity states (if enabled)
+    for device_button in DEVICE_BUTTONS:
+        if entity_id := entity_registry.async_get_entity_id(
+            EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
+        ):
+            if state := hass.states.get(entity_id):
+                state_dict = dict(state.as_dict())
+
+                # Remove context as it is not relevant
+                state_dict.pop("context")
+                data[f"{device_button}_event"] = state_dict
+
     return data
@@ -5,14 +5,14 @@
       "title": "Manual YAML fix required for Bayesian"
     },
     "no_prob_given_false": {
-      "description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yml` for `bayesian/{entity}`. These observations will be ignored until you do.",
+      "description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yaml` for `bayesian/{entity}`. These observations will be ignored until you do.",
       "title": "Manual YAML addition required for Bayesian"
     }
   },
   "services": {
     "reload": {
       "name": "[%key:common::action::reload%]",
-      "description": "Reloads bayesian sensors from the YAML-configuration."
+      "description": "Reloads Bayesian sensors from the YAML-configuration."
     }
   }
 }
@@ -7,5 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["bring_api"],
+  "quality_scale": "platinum",
   "requirements": ["bring-api==1.0.2"]
 }
@@ -10,9 +10,9 @@ rules:
   config-flow: done
   dependency-transparency: done
   docs-actions: done
-  docs-high-level-description: todo
-  docs-installation-instructions: todo
-  docs-removal-instructions: todo
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
   entity-event-setup:
     status: exempt
     comment: The integration registers no events
@@ -26,8 +26,10 @@ rules:
   # Silver
   action-exceptions: done
   config-entry-unloading: done
-  docs-configuration-parameters: todo
-  docs-installation-parameters: todo
+  docs-configuration-parameters:
+    status: exempt
+    comment: Integration has no configuration parameters
+  docs-installation-parameters: done
   entity-unavailable: done
   integration-owner: done
   log-when-unavailable:
@@ -46,13 +48,15 @@ rules:
   discovery:
     status: exempt
     comment: Integration is a service and has no devices.
-  docs-data-update: todo
-  docs-examples: todo
-  docs-known-limitations: todo
-  docs-supported-devices: todo
-  docs-supported-functions: todo
-  docs-troubleshooting: todo
-  docs-use-cases: todo
+  docs-data-update: done
+  docs-examples: done
+  docs-known-limitations: done
+  docs-supported-devices:
+    status: exempt
+    comment: Integration is a service and has no devices.
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: done
   dynamic-devices: done
   entity-category: done
   entity-device-class: done
@@ -17,13 +17,13 @@ class BroadlinkEntity(Entity):
         self._device = device
         self._coordinator = device.update_manager.coordinator
 
-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
         """Call when the entity is added to hass."""
        self.async_on_remove(self._coordinator.async_add_listener(self._recv_data))
         if self._coordinator.data:
             self._update_state(self._coordinator.data)
 
-    async def async_update(self):
+    async def async_update(self) -> None:
         """Update the state of the entity."""
         await self._coordinator.async_request_refresh()
 
@@ -49,7 +49,7 @@ class BroadlinkEntity(Entity):
         """
 
     @property
-    def available(self):
+    def available(self) -> bool:
         """Return True if the entity is available."""
         return self._device.available
 
@@ -6,6 +6,7 @@ import asyncio
 from collections.abc import Awaitable, Callable
 from datetime import datetime, timedelta
 from enum import Enum
+import logging
 from typing import cast
 
 from hass_nabucasa import Cloud
@@ -19,6 +20,7 @@ from homeassistant.const import (
     CONF_NAME,
     CONF_REGION,
     EVENT_HOMEASSISTANT_STOP,
+    FORMAT_DATETIME,
     Platform,
 )
 from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback
@@ -33,7 +35,7 @@ from homeassistant.helpers.dispatcher import (
 from homeassistant.helpers.event import async_call_later
 from homeassistant.helpers.service import async_register_admin_service
 from homeassistant.helpers.typing import ConfigType
-from homeassistant.loader import bind_hass
+from homeassistant.loader import async_get_integration, bind_hass
 from homeassistant.util.signal_type import SignalType
 
 # Pre-import backup to avoid it being imported
@@ -62,11 +64,13 @@ from .const import (
     CONF_THINGTALK_SERVER,
     CONF_USER_POOL_ID,
     DATA_CLOUD,
+    DATA_CLOUD_LOG_HANDLER,
     DATA_PLATFORMS_SETUP,
     DOMAIN,
     MODE_DEV,
     MODE_PROD,
 )
+from .helpers import FixedSizeQueueLogHandler
 from .prefs import CloudPreferences
 from .repairs import async_manage_legacy_subscription_issue
 from .subscription import async_subscription_info
@@ -245,6 +249,8 @@ def async_remote_ui_url(hass: HomeAssistant) -> str:
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Initialize the Home Assistant cloud."""
+    log_handler = hass.data[DATA_CLOUD_LOG_HANDLER] = await _setup_log_handler(hass)
+
     # Process configs
     if DOMAIN in config:
         kwargs = dict(config[DOMAIN])
@@ -267,6 +273,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     async def _shutdown(event: Event) -> None:
         """Shutdown event."""
         await cloud.stop()
+        logging.root.removeHandler(log_handler)
+        del hass.data[DATA_CLOUD_LOG_HANDLER]
 
     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
 
@@ -405,3 +413,19 @@ def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None:
     async_register_admin_service(
         hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
     )
+
+
+async def _setup_log_handler(hass: HomeAssistant) -> FixedSizeQueueLogHandler:
+    fmt = (
+        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    )
+    handler = FixedSizeQueueLogHandler()
+    handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+
+    integration = await async_get_integration(hass, DOMAIN)
+    loggers: set[str] = {integration.pkg_path, *(integration.loggers or [])}
+
+    for logger_name in loggers:
+        logging.getLogger(logger_name).addHandler(handler)
+
+    return handler
@@ -3,17 +3,20 @@
 from __future__ import annotations
 
 import asyncio
-import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
-import hashlib
 import logging
 import random
-from typing import Any, Literal
+from typing import Any
 
 from aiohttp import ClientError
 from hass_nabucasa import Cloud, CloudError
 from hass_nabucasa.api import CloudApiNonRetryableError
-from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
+from hass_nabucasa.cloud_api import (
+    FilesHandlerListEntry,
+    async_files_delete_file,
+    async_files_list,
+)
+from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
 
 from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
 from homeassistant.core import HomeAssistant, callback
@@ -24,20 +27,11 @@ from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
 
 _LOGGER = logging.getLogger(__name__)
-_STORAGE_BACKUP: Literal["backup"] = "backup"
 _RETRY_LIMIT = 5
 _RETRY_SECONDS_MIN = 60
 _RETRY_SECONDS_MAX = 600
 
 
-async def _b64md5(stream: AsyncIterator[bytes]) -> str:
-    """Calculate the MD5 hash of a file."""
-    file_hash = hashlib.md5()
-    async for chunk in stream:
-        file_hash.update(chunk)
-    return base64.b64encode(file_hash.digest()).decode()
-
-
 async def async_get_backup_agents(
     hass: HomeAssistant,
     **kwargs: Any,
@@ -86,11 +80,6 @@ class CloudBackupAgent(BackupAgent):
         self._cloud = cloud
         self._hass = hass
 
-    @callback
-    def _get_backup_filename(self) -> str:
-        """Return the backup filename."""
-        return f"{self._cloud.client.prefs.instance_id}.tar"
-
     async def async_download_backup(
         self,
         backup_id: str,
@@ -101,13 +90,13 @@ class CloudBackupAgent(BackupAgent):
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         :return: An async iterator that yields bytes.
         """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
             raise BackupAgentError("Backup not found")
 
         try:
             content = await self._cloud.files.download(
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
+                storage_type=StorageType.BACKUP,
+                filename=backup["Key"],
             )
         except CloudError as err:
             raise BackupAgentError(f"Failed to download backup: {err}") from err
@@ -129,16 +118,19 @@ class CloudBackupAgent(BackupAgent):
         if not backup.protected:
             raise BackupAgentError("Cloud backups must be protected")
 
-        base64md5hash = await _b64md5(await open_stream())
-        filename = self._get_backup_filename()
-        metadata = backup.as_dict()
         size = backup.size
+        try:
+            base64md5hash = await calculate_b64md5(open_stream, size)
+        except FilesError as err:
+            raise BackupAgentError(err) from err
+        filename = f"{self._cloud.client.prefs.instance_id}.tar"
+        metadata = backup.as_dict()
 
         tries = 1
         while tries <= _RETRY_LIMIT:
             try:
                 await self._cloud.files.upload(
-                    storage_type=_STORAGE_BACKUP,
+                    storage_type=StorageType.BACKUP,
                     open_stream=open_stream,
                     filename=filename,
                     base64md5hash=base64md5hash,
@@ -179,27 +171,34 @@ class CloudBackupAgent(BackupAgent):
 
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
             return
 
         try:
             await async_files_delete_file(
                 self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
+                storage_type=StorageType.BACKUP,
+                filename=backup["Key"],
             )
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to delete backup") from err
 
     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
+        """List backups."""
+        backups = await self._async_list_backups()
+        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+
+    async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
         """List backups."""
         try:
-            backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
-            _LOGGER.debug("Cloud backups: %s", backups)
+            backups = await async_files_list(
+                self._cloud, storage_type=StorageType.BACKUP
+            )
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
 
-        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+        _LOGGER.debug("Cloud backups: %s", backups)
+        return backups
 
     async def async_get_backup(
         self,
@@ -207,10 +206,19 @@ class CloudBackupAgent(BackupAgent):
         **kwargs: Any,
     ) -> AgentBackup | None:
         """Return a backup."""
-        backups = await self.async_list_backups()
+        if not (backup := await self._async_get_backup(backup_id)):
+            return None
+        return AgentBackup.from_dict(backup["Metadata"])
+
+    async def _async_get_backup(
+        self,
+        backup_id: str,
+    ) -> FilesHandlerListEntry | None:
+        """Return a backup."""
+        backups = await self._async_list_backups()
 
         for backup in backups:
-            if backup.backup_id == backup_id:
+            if backup["Metadata"]["backup_id"] == backup_id:
                 return backup
 
         return None
@@ -12,12 +12,14 @@ if TYPE_CHECKING:
     from hass_nabucasa import Cloud
 
     from .client import CloudClient
+    from .helpers import FixedSizeQueueLogHandler
 
 DOMAIN = "cloud"
 DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN)
 DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey(
     "cloud_platforms_setup"
 )
+DATA_CLOUD_LOG_HANDLER: HassKey[FixedSizeQueueLogHandler] = HassKey("cloud_log_handler")
 EVENT_CLOUD_EVENT = "cloud_event"
 
 REQUEST_TIMEOUT = 10
homeassistant/components/cloud/helpers.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+"""Helpers for the cloud component."""
+
+from collections import deque
+import logging
+
+from homeassistant.core import HomeAssistant
+
+
+class FixedSizeQueueLogHandler(logging.Handler):
+    """Log handler to store messages, with auto rotation."""
+
+    MAX_RECORDS = 500
+
+    def __init__(self) -> None:
+        """Initialize a new LogHandler."""
+        super().__init__()
+        self._records: deque[logging.LogRecord] = deque(maxlen=self.MAX_RECORDS)
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """Store log message."""
+        self._records.append(record)
+
+    async def get_logs(self, hass: HomeAssistant) -> list[str]:
+        """Get stored logs."""
+
+        def _get_logs() -> list[str]:
+            # copy the queue since it can mutate while iterating
+            records = self._records.copy()
+            return [self.format(record) for record in records]
+
+        return await hass.async_add_executor_job(_get_logs)
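Note (illustrative sketch, not part of the commit): the handler's rotation comes from deque(maxlen=...), which silently drops the oldest record once the limit is reached. A standalone sketch of the same pattern with a hypothetical limit of 3:

    from collections import deque
    import logging

    records: deque[logging.LogRecord] = deque(maxlen=3)  # hypothetical small MAX_RECORDS
    logger = logging.getLogger("demo")
    for i in range(5):
        records.append(logger.makeRecord("demo", logging.INFO, __file__, 0, f"msg {i}", (), None))
    print([r.getMessage() for r in records])  # ['msg 2', 'msg 3', 'msg 4'] -- oldest two rotated out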
@@ -43,6 +43,7 @@ from .assist_pipeline import async_create_cloud_pipeline
 from .client import CloudClient
 from .const import (
     DATA_CLOUD,
+    DATA_CLOUD_LOG_HANDLER,
     EVENT_CLOUD_EVENT,
     LOGIN_MFA_TIMEOUT,
     PREF_ALEXA_REPORT_STATE,
@@ -397,8 +398,11 @@ class DownloadSupportPackageView(HomeAssistantView):
     url = "/api/cloud/support_package"
     name = "api:cloud:support_package"
 
-    def _generate_markdown(
-        self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
+    async def _generate_markdown(
+        self,
+        hass: HomeAssistant,
+        hass_info: dict[str, Any],
+        domains_info: dict[str, dict[str, str]],
     ) -> str:
         def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
             if len(domain_info) == 0:
@@ -424,6 +428,17 @@ class DownloadSupportPackageView(HomeAssistantView):
                 "</details>\n\n"
             )
 
+        log_handler = hass.data[DATA_CLOUD_LOG_HANDLER]
+        logs = "\n".join(await log_handler.get_logs(hass))
+        markdown += (
+            "## Full logs\n\n"
+            "<details><summary>Logs</summary>\n\n"
+            "```logs\n"
+            f"{logs}\n"
+            "```\n\n"
+            "</details>\n"
+        )
+
         return markdown
 
     async def get(self, request: web.Request) -> web.Response:
@@ -433,7 +448,7 @@ class DownloadSupportPackageView(HomeAssistantView):
         domain_health = await get_system_health_info(hass)
 
         hass_info = domain_health.pop("homeassistant", {})
-        markdown = self._generate_markdown(hass_info, domain_health)
+        markdown = await self._generate_markdown(hass, hass_info, domain_health)
 
         return web.Response(
             body=markdown,
@@ -12,7 +12,7 @@
   "documentation": "https://www.home-assistant.io/integrations/cloud",
   "integration_type": "system",
   "iot_class": "cloud_push",
-  "loggers": ["hass_nabucasa"],
+  "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==0.89.0"],
+  "requirements": ["hass-nabucasa==0.92.0"],
   "single_config_entry": true
 }
@@ -53,6 +53,7 @@ from homeassistant.helpers import (
 )
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.event import async_track_state_added_domain
+from homeassistant.util import language as language_util
 from homeassistant.util.json import JsonObjectType, json_loads_object

 from .chat_log import AssistantContent, async_get_chat_log
@@ -914,26 +915,20 @@ class DefaultAgent(ConversationEntity):
     def _load_intents(self, language: str) -> LanguageIntents | None:
         """Load all intents for language (run inside executor)."""
         intents_dict: dict[str, Any] = {}
-        language_variant: str | None = None
         supported_langs = set(get_languages())

         # Choose a language variant upfront and commit to it for custom
         # sentences, etc.
-        all_language_variants = {lang.lower(): lang for lang in supported_langs}
-
-        # en-US, en_US, en, ...
-        for maybe_variant in _get_language_variations(language):
-            matching_variant = all_language_variants.get(maybe_variant.lower())
-            if matching_variant:
-                language_variant = matching_variant
-                break
-
-        if not language_variant:
+        lang_matches = language_util.matches(language, supported_langs)
+
+        if not lang_matches:
             _LOGGER.warning(
                 "Unable to find supported language variant for %s", language
             )
             return None

+        language_variant = lang_matches[0]
+
         # Load intents for this language variant
         lang_variant_intents = get_intents(language_variant, json_load=json_load)
@@ -23,14 +23,14 @@
       }
     },
     "error": {
-      "discovery_error": "Failed to discover a Denon AVR Network Receiver"
+      "discovery_error": "Failed to discover a Denon AVR network receiver"
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
-      "cannot_connect": "Failed to connect, please try again, disconnecting mains power and ethernet cables and reconnecting them may help",
+      "cannot_connect": "Failed to connect, please try again, disconnecting mains power and Ethernet cables and reconnecting them may help",
-      "not_denonavr_manufacturer": "Not a Denon AVR Network Receiver, discovered manufacturer did not match",
+      "not_denonavr_manufacturer": "Not a Denon AVR network receiver, discovered manufacturer did not match",
-      "not_denonavr_missing": "Not a Denon AVR Network Receiver, discovery information not complete"
+      "not_denonavr_missing": "Not a Denon AVR network receiver, discovery information not complete"
     }
   },
   "options": {
@@ -64,7 +64,7 @@
       "fields": {
         "dynamic_eq": {
           "name": "Dynamic equalizer",
-          "description": "True/false for enable/disable."
+          "description": "Whether DynamicEQ should be enabled or disabled."
         }
       }
     },
@@ -60,12 +60,12 @@
       "description": "Requests gas prices from easyEnergy.",
       "fields": {
         "config_entry": {
-          "name": "Config Entry",
+          "name": "Config entry",
           "description": "The configuration entry to use for this action."
         },
         "incl_vat": {
-          "name": "VAT Included",
+          "name": "VAT included",
-          "description": "Include or exclude VAT in the prices, default is true."
+          "description": "Whether the prices should include VAT."
         },
         "start": {
           "name": "Start",
@@ -23,7 +23,7 @@ from homeassistant.components.climate import (
 from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
+from homeassistant.helpers.issue_registry import IssueSeverity, create_issue

 from . import EconetConfigEntry
 from .const import DOMAIN
@@ -35,8 +35,13 @@ ECONET_STATE_TO_HA = {
     ThermostatOperationMode.OFF: HVACMode.OFF,
     ThermostatOperationMode.AUTO: HVACMode.HEAT_COOL,
     ThermostatOperationMode.FAN_ONLY: HVACMode.FAN_ONLY,
+    ThermostatOperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
+}
+HA_STATE_TO_ECONET = {
+    value: key
+    for key, value in ECONET_STATE_TO_HA.items()
+    if key != ThermostatOperationMode.EMERGENCY_HEAT
 }
-HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}


 ECONET_FAN_STATE_TO_HA = {
     ThermostatFanMode.AUTO: FAN_AUTO,
@@ -209,7 +214,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):

     def turn_aux_heat_on(self) -> None:
         """Turn auxiliary heater on."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",
@@ -223,7 +228,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):

     def turn_aux_heat_off(self) -> None:
         """Turn auxiliary heater off."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/econet",
   "iot_class": "cloud_push",
   "loggers": ["paho_mqtt", "pyeconet"],
-  "requirements": ["pyeconet==0.1.27"]
+  "requirements": ["pyeconet==0.1.28"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
 }
@@ -250,7 +250,7 @@
       "message": "Params are required for the command: {command}"
     },
     "vacuum_raw_get_positions_not_supported": {
-      "message": "Getting the positions of the chargers and the device itself is not supported"
+      "message": "Retrieving the positions of the chargers and the device itself is not supported"
     }
   },
   "selector": {
@@ -264,7 +264,7 @@
   "services": {
     "raw_get_positions": {
       "name": "Get raw positions",
-      "description": "Get the raw response for the positions of the chargers and the device itself."
+      "description": "Retrieves a raw response containing the positions of the chargers and the device itself."
     }
   }
 }
@@ -2,16 +2,18 @@

 from __future__ import annotations

+import asyncio
 from collections.abc import Callable

 from aiohttp import ClientError
 from eheimdigital.device import EheimDigitalDevice
 from eheimdigital.hub import EheimDigitalHub
-from eheimdigital.types import EheimDeviceType
+from eheimdigital.types import EheimDeviceType, EheimDigitalClientError

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -43,12 +45,14 @@ class EheimDigitalUpdateCoordinator(
             name=DOMAIN,
             update_interval=DEFAULT_SCAN_INTERVAL,
         )
+        self.main_device_added_event = asyncio.Event()
         self.hub = EheimDigitalHub(
             host=self.config_entry.data[CONF_HOST],
             session=async_get_clientsession(hass),
             loop=hass.loop,
             receive_callback=self._async_receive_callback,
             device_found_callback=self._async_device_found,
+            main_device_added_event=self.main_device_added_event,
         )
         self.known_devices: set[str] = set()
         self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set()
@@ -76,8 +80,17 @@ class EheimDigitalUpdateCoordinator(
         self.async_set_updated_data(self.hub.devices)

     async def _async_setup(self) -> None:
-        await self.hub.connect()
-        await self.hub.update()
+        try:
+            await self.hub.connect()
+            async with asyncio.timeout(2):
+                # This event gets triggered when the first message is received from
+                # the device, it contains the data necessary to create the main device.
+                # This removes the race condition where the main device is accessed
+                # before the response from the device is parsed.
+                await self.main_device_added_event.wait()
+            await self.hub.update()
+        except (TimeoutError, EheimDigitalClientError) as err:
+            raise ConfigEntryNotReady from err

     async def _async_update_data(self) -> dict[str, EheimDigitalDevice]:
         try:
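Editor's note: a standalone sketch (not part of the commit) of the wait-for-first-message pattern used in the hunk above, assuming only the standard library: an asyncio.Event set by the receive callback is awaited under a timeout so setup fails fast instead of racing the first device message. All names below are illustrative.

    import asyncio

    class Hub:
        def __init__(self) -> None:
            self.first_message = asyncio.Event()

        def on_message(self, msg: dict) -> None:
            # called by the transport when the first device message arrives
            self.first_message.set()

    async def setup(hub: Hub) -> None:
        try:
            async with asyncio.timeout(2):
                await hub.first_message.wait()
        except TimeoutError:
            raise RuntimeError("device did not answer in time") from None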
@@ -498,7 +498,11 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
         self, discovery_info: ZeroconfServiceInfo
     ) -> ConfigFlowResult:
         """Handle device found via zeroconf."""
-        host = discovery_info.host
+        host = (
+            f"[{discovery_info.ip_address}]"
+            if discovery_info.ip_address.version == 6
+            else str(discovery_info.ip_address)
+        )
         https_port = (
             int(discovery_info.port)
             if discovery_info.port is not None
@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["sense_energy"],
   "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.4"]
+  "requirements": ["sense-energy==0.13.5"]
 }
@@ -16,7 +16,7 @@ class EnOceanEntity(Entity):
         """Initialize the device."""
         self.dev_id = dev_id

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
         """Register callbacks."""
         self.async_on_remove(
             async_dispatcher_connect(
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
-  "requirements": ["pyenphase==1.23.1"],
+  "requirements": ["pyenphase==1.25.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
@@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .const import DOMAIN
 from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator
-from .entity import EnvoyBaseEntity
+from .entity import EnvoyBaseEntity, exception_handler

 PARALLEL_UPDATES = 1

@@ -192,6 +192,7 @@ class EnvoyRelaySelectEntity(EnvoyBaseEntity, SelectEntity):
         """Return the state of the Enpower switch."""
         return self.entity_description.value_fn(self.relay)

+    @exception_handler
     async def async_select_option(self, option: str) -> None:
         """Update the relay."""
         await self.entity_description.update_fn(self.envoy, self.relay, option)
@@ -243,6 +244,7 @@ class EnvoyStorageSettingsSelectEntity(EnvoyBaseEntity, SelectEntity):
         assert self.data.tariff.storage_settings is not None
         return self.entity_description.value_fn(self.data.tariff.storage_settings)

+    @exception_handler
     async def async_select_option(self, option: str) -> None:
         """Update the relay."""
         await self.entity_description.update_fn(self.envoy, option)
@@ -16,7 +16,7 @@
   "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
   "mqtt": ["esphome/discover/#"],
   "requirements": [
-    "aioesphomeapi==29.0.0",
+    "aioesphomeapi==29.0.2",
     "esphome-dashboard-api==1.2.3",
     "bleak-esphome==2.7.1"
   ],
@@ -4,50 +4,43 @@ from __future__ import annotations

 from datetime import timedelta

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant

-from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
-from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
+from .coordinator import (
+    FireServiceConfigEntry,
+    FireServiceRotaClient,
+    FireServiceUpdateCoordinator,
+)

 MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)

 PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FireServiceConfigEntry) -> bool:
     """Set up FireServiceRota from a config entry."""

-    hass.data.setdefault(DOMAIN, {})
-
     client = FireServiceRotaClient(hass, entry)
     await client.setup()

     if client.token_refresh_failure:
         return False

+    entry.async_on_unload(client.async_stop_listener)
     coordinator = FireServiceUpdateCoordinator(hass, client, entry)

     await coordinator.async_config_entry_first_refresh()

-    hass.data[DOMAIN][entry.entry_id] = {
-        DATA_CLIENT: client,
-        DATA_COORDINATOR: coordinator,
-    }
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(
+    hass: HomeAssistant, entry: FireServiceConfigEntry
+) -> bool:
     """Unload FireServiceRota config entry."""
-    await hass.async_add_executor_job(
-        hass.data[DOMAIN][entry.entry_id][DATA_CLIENT].websocket.stop_listener
-    )
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    if unload_ok:
-        del hass.data[DOMAIN][entry.entry_id]
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -10,24 +10,22 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
-from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
+from .coordinator import (
+    FireServiceConfigEntry,
+    FireServiceRotaClient,
+    FireServiceUpdateCoordinator,
+)


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: FireServiceConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up FireServiceRota binary sensor based on a config entry."""

-    client: FireServiceRotaClient = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][
-        DATA_CLIENT
-    ]
-
-    coordinator: FireServiceUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
-        entry.entry_id
-    ][DATA_COORDINATOR]
+    coordinator = entry.runtime_data
+    client = coordinator.client

     async_add_entities([ResponseBinarySensor(coordinator, client, entry)])

@@ -28,12 +28,19 @@ _LOGGER = logging.getLogger(__name__)

 PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]

+type FireServiceConfigEntry = ConfigEntry[FireServiceUpdateCoordinator]
+
+
 class FireServiceUpdateCoordinator(DataUpdateCoordinator[dict | None]):
     """Data update coordinator for FireServiceRota."""

+    config_entry: FireServiceConfigEntry
+
     def __init__(
-        self, hass: HomeAssistant, client: FireServiceRotaClient, entry: ConfigEntry
+        self,
+        hass: HomeAssistant,
+        client: FireServiceRotaClient,
+        entry: FireServiceConfigEntry,
     ) -> None:
         """Initialize the FireServiceRota DataUpdateCoordinator."""
         super().__init__(
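Editor's note: a hedged sketch (not part of the commit) of the typed-entry pattern the hunk above introduces. Parametrizing ConfigEntry with the coordinator type lets platforms read entry.runtime_data without casts or hass.data bookkeeping; MyCoordinator and MyConfigEntry below are illustrative stand-ins.

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant

    type MyConfigEntry = ConfigEntry[MyCoordinator]  # runtime_data holds the coordinator

    async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
        coordinator = MyCoordinator(hass, entry)
        await coordinator.async_config_entry_first_refresh()
        # Platforms later read this back as entry.runtime_data, fully typed.
        entry.runtime_data = coordinator
        return True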
@@ -213,3 +220,7 @@ class FireServiceRotaClient:
         )

         await self.update_call(self.fsr.set_incident_response, self.incident_id, value)
+
+    async def async_stop_listener(self) -> None:
+        """Stop listener."""
+        await self._hass.async_add_executor_job(self.websocket.stop_listener)
@@ -4,27 +4,24 @@ import logging
 from typing import Any

 from homeassistant.components.sensor import SensorEntity
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity

-from .const import DATA_CLIENT, DOMAIN as FIRESERVICEROTA_DOMAIN
-from .coordinator import FireServiceRotaClient
+from .const import DOMAIN as FIRESERVICEROTA_DOMAIN
+from .coordinator import FireServiceConfigEntry, FireServiceRotaClient

 _LOGGER = logging.getLogger(__name__)


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: FireServiceConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up FireServiceRota sensor based on a config entry."""
-    client = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_CLIENT]
-
-    async_add_entities([IncidentsSensor(client)])
+    async_add_entities([IncidentsSensor(entry.runtime_data.client)])


 # pylint: disable-next=hass-invalid-inheritance # needs fixing
@@ -9,21 +9,24 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

-from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
-from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
+from .const import DOMAIN as FIRESERVICEROTA_DOMAIN
+from .coordinator import (
+    FireServiceConfigEntry,
+    FireServiceRotaClient,
+    FireServiceUpdateCoordinator,
+)

 _LOGGER = logging.getLogger(__name__)


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: FireServiceConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up FireServiceRota switch based on a config entry."""
-    client = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_CLIENT]
-    coordinator = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_COORDINATOR]
+    coordinator = entry.runtime_data
+    client = coordinator.client

     async_add_entities([ResponseSwitch(coordinator, client, entry)])

@@ -47,6 +47,10 @@ async def async_setup_entry(
     )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class FlexitBinarySensor(FlexitEntity, BinarySensorEntity):
     """Representation of a Flexit binary Sensor."""

@@ -25,6 +25,7 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .const import (
+    DOMAIN,
     MAX_TEMP,
     MIN_TEMP,
     PRESET_TO_VENTILATION_MODE_MAP,
@@ -43,6 +44,9 @@ async def async_setup_entry(
     async_add_entities([FlexitClimateEntity(config_entry.runtime_data)])


+PARALLEL_UPDATES = 1
+
+
 class FlexitClimateEntity(FlexitEntity, ClimateEntity):
     """Flexit air handling unit."""

@@ -130,7 +134,13 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
         try:
             await self.device.set_ventilation_mode(ventilation_mode)
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_preset_mode",
+                translation_placeholders={
+                    "preset": str(ventilation_mode),
+                },
+            ) from exc
         finally:
             await self.coordinator.async_refresh()

@@ -150,6 +160,12 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
             else:
                 await self.device.set_ventilation_mode(VENTILATION_MODE_HOME)
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_hvac_mode",
+                translation_placeholders={
+                    "mode": str(hvac_mode),
+                },
+            ) from exc
         finally:
             await self.coordinator.async_refresh()
|
@@ -49,7 +49,11 @@ class FlexitCoordinator(DataUpdateCoordinator[FlexitBACnet]):
             await self.device.update()
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
             raise ConfigEntryNotReady(
-                f"Timeout while connecting to {self.config_entry.data[CONF_IP_ADDRESS]}"
+                translation_domain=DOMAIN,
+                translation_key="not_ready",
+                translation_placeholders={
+                    "ip": str(self.config_entry.data[CONF_IP_ADDRESS]),
+                },
             ) from exc

         return self.device
|
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
   "integration_type": "device",
   "iot_class": "local_polling",
+  "quality_scale": "bronze",
   "requirements": ["flexit_bacnet==2.2.3"]
 }
@@ -18,6 +18,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

+from .const import DOMAIN
 from .coordinator import FlexitConfigEntry, FlexitCoordinator
 from .entity import FlexitEntity

@@ -205,6 +206,9 @@ async def async_setup_entry(
     )


+PARALLEL_UPDATES = 1
+
+
 class FlexitNumber(FlexitEntity, NumberEntity):
     """Representation of a Flexit Number."""

@@ -246,6 +250,12 @@ class FlexitNumber(FlexitEntity, NumberEntity):
         try:
             await set_native_value_fn(int(value))
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_value_error",
+                translation_placeholders={
+                    "value": str(value),
+                },
+            ) from exc
         finally:
             await self.coordinator.async_refresh()
|
homeassistant/components/flexit_bacnet/quality_scale.yaml (new file, 91 lines)
@@ -0,0 +1,91 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      Integration does not define custom actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not use any actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities don't subscribe to events explicitly
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup:
    status: done
    comment: |
      Done implicitly with `await coordinator.async_config_entry_first_refresh()`.
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      Integration does not use options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: done
    comment: |
      Done implicitly with coordinator.
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: |
      Done implicitly with coordinator.
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: |
      Integration doesn't require any form of authentication.
  test-coverage: todo

  # Gold
  entity-translations: done
  entity-device-class: done
  devices: done
  entity-category: todo
  entity-disabled-by-default: todo
  discovery: todo
  stale-devices:
    status: exempt
    comment: |
      Device type integration.
  diagnostics: todo
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: todo
  dynamic-devices:
    status: exempt
    comment: |
      Device type integration.
  discovery-update-info: todo
  repair-issues:
    status: exempt
    comment: |
      This is not applicable for this integration.
  docs-use-cases: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-data-update: done
  docs-known-limitations: todo
  docs-troubleshooting: todo
  docs-examples: todo

  # Platinum
  async-dependency: todo
  inject-websession: todo
  strict-typing: done
|
||||||
|
strict-typing: done
|
@@ -161,6 +161,10 @@ async def async_setup_entry(
     )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class FlexitSensor(FlexitEntity, SensorEntity):
     """Representation of a Flexit (bacnet) Sensor."""

|
@@ -5,6 +5,10 @@
       "data": {
         "ip_address": "[%key:common::config_flow::data::ip%]",
         "device_id": "[%key:common::config_flow::data::device%]"
+      },
+      "data_description": {
+        "ip_address": "The IP address of the Flexit Nordic device",
+        "device_id": "The device ID of the Flexit Nordic device"
       }
     }
   },
|
||||||
@ -115,5 +119,22 @@
|
|||||||
"name": "Cooker hood mode"
|
"name": "Cooker hood mode"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"exceptions": {
|
||||||
|
"set_value_error": {
|
||||||
|
"message": "Failed setting the value {value}."
|
||||||
|
},
|
||||||
|
"switch_turn": {
|
||||||
|
"message": "Failed to turn the switch {state}."
|
||||||
|
},
|
||||||
|
"set_preset_mode": {
|
||||||
|
"message": "Failed to set preset mode {preset}."
|
||||||
|
},
|
||||||
|
"set_hvac_mode": {
|
||||||
|
"message": "Failed to set HVAC mode {mode}."
|
||||||
|
},
|
||||||
|
"not_ready": {
|
||||||
|
"message": "Timeout while connecting to {ip}."
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant
|
|||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
|
|
||||||
|
from .const import DOMAIN
|
||||||
from .coordinator import FlexitConfigEntry, FlexitCoordinator
|
from .coordinator import FlexitConfigEntry, FlexitCoordinator
|
||||||
from .entity import FlexitEntity
|
from .entity import FlexitEntity
|
||||||
|
|
||||||
@ -68,6 +69,9 @@ async def async_setup_entry(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
PARALLEL_UPDATES = 1
|
||||||
|
|
||||||
|
|
||||||
class FlexitSwitch(FlexitEntity, SwitchEntity):
|
class FlexitSwitch(FlexitEntity, SwitchEntity):
|
||||||
"""Representation of a Flexit Switch."""
|
"""Representation of a Flexit Switch."""
|
||||||
|
|
||||||
@@ -94,19 +98,31 @@ class FlexitSwitch(FlexitEntity, SwitchEntity):
         return self.entity_description.is_on_fn(self.coordinator.data)

     async def async_turn_on(self, **kwargs: Any) -> None:
-        """Turn electric heater on."""
+        """Turn switch on."""
         try:
             await self.entity_description.turn_on_fn(self.coordinator.data)
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="switch_turn",
+                translation_placeholders={
+                    "state": "on",
+                },
+            ) from exc
         finally:
             await self.coordinator.async_refresh()

     async def async_turn_off(self, **kwargs: Any) -> None:
-        """Turn electric heater off."""
+        """Turn switch off."""
         try:
             await self.entity_description.turn_off_fn(self.coordinator.data)
         except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="switch_turn",
+                translation_placeholders={
+                    "state": "off",
+                },
+            ) from exc
         finally:
             await self.coordinator.async_refresh()
||||||
|
@ -45,10 +45,10 @@ class FloEntity(Entity):
|
|||||||
"""Return True if device is available."""
|
"""Return True if device is available."""
|
||||||
return self._device.available
|
return self._device.available
|
||||||
|
|
||||||
async def async_update(self):
|
async def async_update(self) -> None:
|
||||||
"""Update Flo entity."""
|
"""Update Flo entity."""
|
||||||
await self._device.async_request_refresh()
|
await self._device.async_request_refresh()
|
||||||
|
|
||||||
async def async_added_to_hass(self):
|
async def async_added_to_hass(self) -> None:
|
||||||
"""When entity is added to hass."""
|
"""When entity is added to hass."""
|
||||||
self.async_on_remove(self._device.async_add_listener(self.async_write_ha_state))
|
self.async_on_remove(self._device.async_add_listener(self.async_write_ha_state))
|
||||||
|
@@ -36,11 +36,11 @@
   "issues": {
     "import_failed_not_allowed_path": {
       "title": "The Folder Watcher YAML configuration could not be imported",
-      "description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in config.yaml and restart Home Assistant to import it and fix this issue."
+      "description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in configuration.yaml and restart Home Assistant to import it and fix this issue."
     },
     "setup_not_allowed_path": {
       "title": "The Folder Watcher configuration for {path} could not start",
-      "description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in config.yaml and restart Home Assistant to fix this issue."
+      "description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in configuration.yaml and restart Home Assistant to fix this issue."
     }
   },
   "entity": {
|
@@ -3,8 +3,13 @@
 from __future__ import annotations

 import asyncio
+from collections.abc import Sequence
 import logging
+from typing import Any
+
+from pyforked_daapd import ForkedDaapdAPI

+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import PlatformNotReady
 from homeassistant.helpers.dispatcher import async_dispatcher_send

||||||
@@ -26,15 +31,15 @@ WEBSOCKET_RECONNECT_TIME = 30  # seconds
 class ForkedDaapdUpdater:
     """Manage updates for the forked-daapd device."""

-    def __init__(self, hass, api, entry_id):
+    def __init__(self, hass: HomeAssistant, api: ForkedDaapdAPI, entry_id: str) -> None:
         """Initialize."""
         self.hass = hass
         self._api = api
-        self.websocket_handler = None
-        self._all_output_ids = set()
+        self.websocket_handler: asyncio.Task[None] | None = None
+        self._all_output_ids: set[str] = set()
         self._entry_id = entry_id

-    async def async_init(self):
+    async def async_init(self) -> None:
         """Perform async portion of class initialization."""
         if not (server_config := await self._api.get_request("config")):
             raise PlatformNotReady
||||||
@@ -51,7 +56,7 @@ class ForkedDaapdUpdater:
         else:
             _LOGGER.error("Invalid websocket port")

-    async def _disconnected_callback(self):
+    async def _disconnected_callback(self) -> None:
         """Send update signals when the websocket gets disconnected."""
         async_dispatcher_send(
             self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), False
@ -60,9 +65,9 @@ class ForkedDaapdUpdater:
|
|||||||
self.hass, SIGNAL_UPDATE_OUTPUTS.format(self._entry_id), []
|
self.hass, SIGNAL_UPDATE_OUTPUTS.format(self._entry_id), []
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _update(self, update_types):
|
async def _update(self, update_types_sequence: Sequence[str]) -> None:
|
||||||
"""Private update method."""
|
"""Private update method."""
|
||||||
update_types = set(update_types)
|
update_types = set(update_types_sequence)
|
||||||
update_events = {}
|
update_events = {}
|
||||||
_LOGGER.debug("Updating %s", update_types)
|
_LOGGER.debug("Updating %s", update_types)
|
||||||
if (
|
if (
|
||||||
@ -127,8 +132,8 @@ class ForkedDaapdUpdater:
|
|||||||
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), True
|
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), True
|
||||||
)
|
)
|
||||||
|
|
||||||
def _add_zones(self, outputs):
|
def _add_zones(self, outputs: list[dict[str, Any]]) -> None:
|
||||||
outputs_to_add = []
|
outputs_to_add: list[dict[str, Any]] = []
|
||||||
for output in outputs:
|
for output in outputs:
|
||||||
if output["id"] not in self._all_output_ids:
|
if output["id"] not in self._all_output_ids:
|
||||||
self._all_output_ids.add(output["id"])
|
self._all_output_ids.add(output["id"])
|
||||||
|
@@ -85,9 +85,9 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up forked-daapd from a config entry."""
-    host = config_entry.data[CONF_HOST]
-    port = config_entry.data[CONF_PORT]
-    password = config_entry.data[CONF_PASSWORD]
+    host: str = config_entry.data[CONF_HOST]
+    port: int = config_entry.data[CONF_PORT]
+    password: str = config_entry.data[CONF_PASSWORD]
     forked_daapd_api = ForkedDaapdAPI(
         async_get_clientsession(hass), host, port, password
     )
||||||
@@ -95,8 +95,6 @@ async def async_setup_entry(
         clientsession=async_get_clientsession(hass),
         api=forked_daapd_api,
         ip_address=host,
-        api_port=port,
-        api_password=password,
         config_entry=config_entry,
     )

||||||
@@ -240,9 +238,7 @@ class ForkedDaapdMaster(MediaPlayerEntity):

     _attr_should_poll = False

-    def __init__(
-        self, clientsession, api, ip_address, api_port, api_password, config_entry
-    ):
+    def __init__(self, clientsession, api, ip_address, config_entry):
         """Initialize the ForkedDaapd Master Device."""
         # Leave the api public so the browse media helpers can use it
         self.api = api
||||||
@@ -269,7 +265,7 @@ class ForkedDaapdMaster(MediaPlayerEntity):
         self._on_remove = None
         self._available = False
         self._clientsession = clientsession
-        self._config_entry = config_entry
+        self._entry_id = config_entry.entry_id
         self.update_options(config_entry.options)
         self._paused_event = asyncio.Event()
         self._pause_requested = False
||||||
@@ -282,42 +278,42 @@ class ForkedDaapdMaster(MediaPlayerEntity):
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_UPDATE_PLAYER.format(self._config_entry.entry_id),
+                SIGNAL_UPDATE_PLAYER.format(self._entry_id),
                 self._update_player,
             )
         )
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_UPDATE_QUEUE.format(self._config_entry.entry_id),
+                SIGNAL_UPDATE_QUEUE.format(self._entry_id),
                 self._update_queue,
             )
         )
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_UPDATE_OUTPUTS.format(self._config_entry.entry_id),
+                SIGNAL_UPDATE_OUTPUTS.format(self._entry_id),
                 self._update_outputs,
             )
         )
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_UPDATE_MASTER.format(self._config_entry.entry_id),
+                SIGNAL_UPDATE_MASTER.format(self._entry_id),
                 self._update_callback,
             )
         )
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_CONFIG_OPTIONS_UPDATE.format(self._config_entry.entry_id),
+                SIGNAL_CONFIG_OPTIONS_UPDATE.format(self._entry_id),
                 self.update_options,
             )
         )
         self.async_on_remove(
             async_dispatcher_connect(
                 self.hass,
-                SIGNAL_UPDATE_DATABASE.format(self._config_entry.entry_id),
+                SIGNAL_UPDATE_DATABASE.format(self._entry_id),
                 self._update_database,
             )
         )
||||||
@@ -411,9 +407,9 @@ class ForkedDaapdMaster(MediaPlayerEntity):
         self._track_info = defaultdict(str)

     @property
-    def unique_id(self):
+    def unique_id(self) -> str:
         """Return unique ID."""
-        return self._config_entry.entry_id
+        return self._entry_id

     @property
     def available(self) -> bool:
|
@@ -35,7 +35,7 @@
   "services": {
     "ptz": {
       "name": "PTZ",
-      "description": "Pan/Tilt action for Foscam camera.",
+      "description": "Moves a Foscam camera to a specified direction.",
       "fields": {
         "movement": {
           "name": "Movement",
@@ -49,7 +49,7 @@
     },
     "ptz_preset": {
       "name": "PTZ preset",
-      "description": "PTZ Preset action for Foscam camera.",
+      "description": "Moves a Foscam camera to a predefined position.",
       "fields": {
         "preset_name": {
           "name": "Preset name",
|
@@ -196,6 +196,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         self.hass = hass
         self.host = host
         self.mesh_role = MeshRoles.NONE
+        self.mesh_wifi_uplink = False
         self.device_conn_type: str | None = None
         self.device_is_router: bool = False
         self.password = password
||||||
@@ -610,6 +611,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
                     ssid=interf.get("ssid", ""),
                     type=interf["type"],
                 )
+
+                if interf["type"].lower() == "wlan" and interf[
+                    "name"
+                ].lower().startswith("uplink"):
+                    self.mesh_wifi_uplink = True
+
                 if dr.format_mac(int_mac) == self.mac:
                     self.mesh_role = MeshRoles(node["mesh_role"])

|
@@ -207,8 +207,9 @@ async def async_all_entities_list(
     local_ip: str,
 ) -> list[Entity]:
     """Get a list of all entities."""
-
     if avm_wrapper.mesh_role == MeshRoles.SLAVE:
+        if not avm_wrapper.mesh_wifi_uplink:
+            return [*await _async_wifi_entities_list(avm_wrapper, device_friendly_name)]
         return []

     return [
||||||
@@ -565,6 +566,9 @@ class FritzBoxWifiSwitch(FritzBoxBaseSwitch):

         self._attributes = {}
         self._attr_entity_category = EntityCategory.CONFIG
+        self._attr_entity_registry_enabled_default = (
+            avm_wrapper.mesh_role is not MeshRoles.SLAVE
+        )
         self._network_num = network_num

         switch_info = SwitchInfo(
|
@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250210.0"]
"requirements": ["home-assistant-frontend==20250214.0"]
}
@ -4,25 +4,27 @@ from __future__ import annotations

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .const import DOMAIN, PLATFORMS
from .const import PLATFORMS
from .manager import GeoJsonFeedEntityManager
from .manager import GeoJsonConfigEntry, GeoJsonFeedEntityManager

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, config_entry: GeoJsonConfigEntry
) -> bool:
"""Set up the GeoJSON events component as config entry."""
feeds = hass.data.setdefault(DOMAIN, {})
# Create feed entity manager for all platforms.
manager = GeoJsonFeedEntityManager(hass, config_entry)
feeds[config_entry.entry_id] = manager
_LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id)
await remove_orphaned_entities(hass, config_entry.entry_id)

config_entry.runtime_data = manager
config_entry.async_on_unload(manager.async_stop)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
await manager.async_init()
return True
@ -46,10 +48,6 @@ async def remove_orphaned_entities(hass: HomeAssistant, entry_id: str) -> None:
entity_registry.async_remove(entry.entity_id)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: GeoJsonConfigEntry) -> bool:
"""Unload the GeoJSON events config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
manager: GeoJsonFeedEntityManager = hass.data[DOMAIN].pop(entry.entry_id)
await manager.async_stop()
return unload_ok
@ -9,31 +9,24 @@ from typing import Any
from aio_geojson_generic_client.feed_entry import GenericFeedEntry

from homeassistant.components.geo_location import GeolocationEvent
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfLength
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import GeoJsonFeedEntityManager
from .const import ATTR_EXTERNAL_ID, SIGNAL_DELETE_ENTITY, SIGNAL_UPDATE_ENTITY, SOURCE
from .const import (
from .manager import GeoJsonConfigEntry, GeoJsonFeedEntityManager
ATTR_EXTERNAL_ID,
DOMAIN,
SIGNAL_DELETE_ENTITY,
SIGNAL_UPDATE_ENTITY,
SOURCE,
)

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: GeoJsonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the GeoJSON Events platform."""
manager: GeoJsonFeedEntityManager = hass.data[DOMAIN][entry.entry_id]
manager = entry.runtime_data

@callback
def async_add_geolocation(
@ -25,6 +25,8 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

type GeoJsonConfigEntry = ConfigEntry[GeoJsonFeedEntityManager]


class GeoJsonFeedEntityManager:
"""Feed Entity Manager for GeoJSON feeds."""
@ -10,7 +10,7 @@ from google.oauth2.credentials import Credentials
import voluptuous as vol

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
from homeassistant.core import (
HomeAssistant,
@ -99,12 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
hass.data[DOMAIN].pop(entry.entry_id)
loaded_entries = [
if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

@ -2,7 +2,7 @@

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, discovery
@ -59,12 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -

async def async_unload_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -> bool:
"""Unload a config entry."""
loaded_entries = [
if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

@ -12,7 +12,7 @@ from gspread.exceptions import APIError
from gspread.utils import ValueInputOption
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
@ -81,12 +81,7 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleSheetsConfigEntry
) -> bool:
"""Unload a config entry."""
loaded_entries = [
if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

@ -11,7 +11,7 @@ from aioguardian import Client
from aioguardian.errors import GuardianError
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_DEVICE_ID,
CONF_DEVICE_ID,
@ -247,12 +247,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)

loaded_entries = [
if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# If this is the last loaded instance of Guardian, deregister any services
# defined during integration setup:
for service_name in SERVICES:
@ -43,7 +43,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
translation_key=HabiticaImageEntity.AVATAR,
)
_attr_content_type = "image/png"
_current_appearance: Avatar | None = None
_avatar: Avatar | None = None
_cache: bytes | None = None

def __init__(
@ -55,13 +55,13 @@ class HabiticaImage(HabiticaBase, ImageEntity):
super().__init__(coordinator, self.entity_description)
ImageEntity.__init__(self, hass)
self._attr_image_last_updated = dt_util.utcnow()
self._avatar = extract_avatar(self.coordinator.data.user)

def _handle_coordinator_update(self) -> None:
"""Check if equipped gear and other things have changed since last avatar image generation."""
new_appearance = extract_avatar(self.coordinator.data.user)

if self._current_appearance != new_appearance:
if self._avatar != self.coordinator.data.user:
self._current_appearance = new_appearance
self._avatar = extract_avatar(self.coordinator.data.user)
self._attr_image_last_updated = dt_util.utcnow()
self._cache = None

@ -69,8 +69,6 @@ class HabiticaImage(HabiticaBase, ImageEntity):

async def async_image(self) -> bytes | None:
"""Return cached bytes, otherwise generate new avatar."""
if not self._cache and self._current_appearance:
if not self._cache and self._avatar:
self._cache = await self.coordinator.generate_avatar(
self._cache = await self.coordinator.generate_avatar(self._avatar)
self._current_appearance
)
return self._cache
@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/habitica",
"iot_class": "cloud_polling",
"loggers": ["habiticalib"],
"quality_scale": "platinum",
"requirements": ["habiticalib==0.3.7"]
}
@ -51,7 +51,7 @@ rules:
status: exempt
comment: No supportable devices.
docs-supported-functions: done
docs-troubleshooting: todo
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
@ -69,3 +69,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_remove_config_entry_device(
hass: HomeAssistant, entry: HeosConfigEntry, device: dr.DeviceEntry
) -> bool:
"""Remove config entry from device if no longer present."""
return not any(
(domain, key)
for domain, key in device.identifiers
if domain == DOMAIN and int(key) in entry.runtime_data.heos.players
)
@ -16,6 +16,7 @@ from pyheos import (
HeosError,
HeosNowPlayingMedia,
HeosOptions,
HeosPlayer,
MediaItem,
MediaType,
PlayerUpdateResult,
@ -58,6 +59,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
credentials=credentials,
)
)
self._platform_callbacks: list[Callable[[Sequence[HeosPlayer]], None]] = []
self._update_sources_pending: bool = False
self._source_list: list[str] = []
self._favorites: dict[int, MediaItem] = {}
@ -124,6 +126,27 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
self.async_update_listeners()
return remove_listener

def async_add_platform_callback(
self, add_entities_callback: Callable[[Sequence[HeosPlayer]], None]
) -> None:
"""Add a callback to add entities for a platform."""
self._platform_callbacks.append(add_entities_callback)

def _async_handle_player_update_result(
self, update_result: PlayerUpdateResult
) -> None:
"""Handle a player update result."""
if update_result.added_player_ids and self._platform_callbacks:
new_players = [
self.heos.players[player_id]
for player_id in update_result.added_player_ids
]
for add_entities_callback in self._platform_callbacks:
add_entities_callback(new_players)

if update_result.updated_player_ids:
self._async_update_player_ids(update_result.updated_player_ids)

async def _async_on_auth_failure(self) -> None:
"""Handle when the user credentials are no longer valid."""
assert self.config_entry is not None
@ -147,8 +170,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
"""Handle a controller event, such as players or groups changed."""
if event == const.EVENT_PLAYERS_CHANGED:
assert data is not None
if data.updated_player_ids:
self._async_handle_player_update_result(data)
self._async_update_player_ids(data.updated_player_ids)
elif (
event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED)
and not self._update_sources_pending
@ -242,9 +264,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
except HeosError as error:
_LOGGER.error("Unable to refresh players: %s", error)
return
# After reconnecting, player_id may have changed
self._async_handle_player_update_result(player_updates)
if player_updates.updated_player_ids:
self._async_update_player_ids(player_updates.updated_player_ids)

@callback
def async_get_source_list(self) -> list[str]:
@ -2,7 +2,7 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable, Coroutine
from collections.abc import Awaitable, Callable, Coroutine, Sequence
from datetime import datetime
from functools import reduce, wraps
from operator import ior
@ -93,11 +93,16 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add media players for a config entry."""
devices = [
HeosMediaPlayer(entry.runtime_data, player)
for player in entry.runtime_data.heos.players.values()
]
async_add_entities(devices)
def add_entities_callback(players: Sequence[HeosPlayer]) -> None:
"""Add entities for each player."""
async_add_entities(
[HeosMediaPlayer(entry.runtime_data, player) for player in players]
)

coordinator = entry.runtime_data
coordinator.async_add_platform_callback(add_entities_callback)
add_entities_callback(list(coordinator.heos.players.values()))


type _FuncType[**_P] = Callable[_P, Awaitable[Any]]
@ -49,7 +49,7 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: todo
dynamic-devices: done
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
@ -57,8 +57,8 @@ rules:
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues: todo
repair-issues: done
stale-devices: todo
stale-devices: done
# Platinum
async-dependency: done
inject-websession:
@ -9,5 +9,5 @@
},
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.1"]
"requirements": ["pyhive-integration==1.0.2"]
}
@ -35,7 +35,7 @@ class SW16Entity(Entity):
self.async_write_ha_state()

@property
def available(self):
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._client.is_connected)

@ -44,7 +44,7 @@ class SW16Entity(Entity):
"""Update availability state."""
self.async_write_ha_state()

async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Register update callback."""
self._client.register_status_callback(
self.handle_event_callback, self._device_port
@ -2,11 +2,19 @@

from __future__ import annotations

from collections.abc import Awaitable
import logging
from typing import Any, cast

from aiohomeconnect.client import Client as HomeConnectClient
from aiohomeconnect.model import CommandKey, Option, OptionKey, ProgramKey, SettingKey
from aiohomeconnect.model import (
ArrayOfOptions,
CommandKey,
Option,
OptionKey,
ProgramKey,
SettingKey,
)
from aiohomeconnect.model.error import HomeConnectError
import voluptuous as vol

@ -19,34 +27,74 @@ from homeassistant.helpers import (
device_registry as dr,
)
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.typing import ConfigType

from .api import AsyncConfigEntryAuth
from .const import (
AFFECTS_TO_ACTIVE_PROGRAM,
AFFECTS_TO_SELECTED_PROGRAM,
ATTR_AFFECTS_TO,
ATTR_KEY,
ATTR_PROGRAM,
ATTR_UNIT,
ATTR_VALUE,
DOMAIN,
OLD_NEW_UNIQUE_ID_SUFFIX_MAP,
PROGRAM_ENUM_OPTIONS,
SERVICE_OPTION_ACTIVE,
SERVICE_OPTION_SELECTED,
SERVICE_PAUSE_PROGRAM,
SERVICE_RESUME_PROGRAM,
SERVICE_SELECT_PROGRAM,
SERVICE_SET_PROGRAM_AND_OPTIONS,
SERVICE_SETTING,
SERVICE_START_PROGRAM,
SVE_TRANSLATION_PLACEHOLDER_KEY,
SVE_TRANSLATION_PLACEHOLDER_PROGRAM,
SVE_TRANSLATION_PLACEHOLDER_VALUE,
TRANSLATION_KEYS_PROGRAMS_MAP,
)
from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator
from .utils import get_dict_from_home_connect_error
from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


PROGRAM_OPTIONS = {
bsh_key_to_translation_key(key): (
key,
value,
)
for key, value in {
OptionKey.BSH_COMMON_DURATION: int,
OptionKey.BSH_COMMON_START_IN_RELATIVE: int,
OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int,
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int,
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool,
OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool,
OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool,
OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool,
OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool,
OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool,
OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool,
OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int,
OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool,
OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool,
OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool,
}.items()
}


SERVICE_SETTING_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -58,6 +106,7 @@ SERVICE_SETTING_SCHEMA = vol.Schema(
}
)

# DEPRECATED: Remove in 2025.9.0
SERVICE_OPTION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -70,6 +119,7 @@ SERVICE_OPTION_SCHEMA = vol.Schema(
}
)

# DEPRECATED: Remove in 2025.9.0
SERVICE_PROGRAM_SCHEMA = vol.Any(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -93,6 +143,46 @@ SERVICE_PROGRAM_SCHEMA = vol.Any(
},
)


def _require_program_or_at_least_one_option(data: dict) -> dict:
if ATTR_PROGRAM not in data and not any(
option_key in data for option_key in (PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS)
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="required_program_or_one_option_at_least",
)
return data


SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
vol.Required(ATTR_AFFECTS_TO): vol.In(
[AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM]
),
vol.Optional(ATTR_PROGRAM): vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()),
}
)
.extend(
{
vol.Optional(translation_key): vol.In(allowed_values.keys())
for translation_key, (
key,
allowed_values,
) in PROGRAM_ENUM_OPTIONS.items()
}
)
.extend(
{
vol.Optional(translation_key): schema
for translation_key, (key, schema) in PROGRAM_OPTIONS.items()
}
),
_require_program_or_at_least_one_option,
)

SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str})

PLATFORMS = [
@ -144,7 +234,7 @@ async def _get_client_and_ha_id(
return entry.runtime_data.client, ha_id


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901
"""Set up Home Connect component."""

async def _async_service_program(call: ServiceCall, start: bool):
@ -165,6 +255,57 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
else None
)

async_create_issue(
hass,
DOMAIN,
"deprecated_set_program_and_option_actions",
breaks_in_ha_version="2025.9.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_set_program_and_option_actions",
translation_placeholders={
"new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS,
"remove_release": "2025.9.0",
"deprecated_action_yaml": "\n".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_PROGRAM}: {program}",
*([f" {ATTR_KEY}: {options[0].key}"] if options else []),
*([f" {ATTR_VALUE}: {options[0].value}"] if options else []),
*(
[f" {ATTR_UNIT}: {options[0].unit}"]
if options and options[0].unit
else []
),
"```",
]
),
"new_action_yaml": "\n ".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}",
f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}",
*(
[
f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}"
]
if options
else []
),
"```",
]
),
"repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)",
},
)

try:
if start:
await client.start_program(ha_id, program_key=program, options=options)
@ -189,6 +330,44 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
unit = call.data.get(ATTR_UNIT)
client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID])

async_create_issue(
hass,
DOMAIN,
"deprecated_set_program_and_option_actions",
breaks_in_ha_version="2025.9.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_set_program_and_option_actions",
translation_placeholders={
"new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS,
"remove_release": "2025.9.0",
"deprecated_action_yaml": "\n".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_KEY}: {option_key}",
f" {ATTR_VALUE}: {value}",
*([f" {ATTR_UNIT}: {unit}"] if unit else []),
"```",
]
),
"new_action_yaml": "\n ".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}",
f" {bsh_key_to_translation_key(option_key)}: {value}",
"```",
]
),
"repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)",
},
)
try:
if active:
await client.set_active_program_option(
@ -272,6 +451,76 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Service for selecting a program."""
await _async_service_program(call, False)

async def async_service_set_program_and_options(call: ServiceCall):
"""Service for setting a program and options."""
data = dict(call.data)
program = data.pop(ATTR_PROGRAM, None)
affects_to = data.pop(ATTR_AFFECTS_TO)
client, ha_id = await _get_client_and_ha_id(hass, data.pop(ATTR_DEVICE_ID))

options: list[Option] = []

for option, value in data.items():
if option in PROGRAM_ENUM_OPTIONS:
options.append(
Option(
PROGRAM_ENUM_OPTIONS[option][0],
PROGRAM_ENUM_OPTIONS[option][1][value],
)
)
elif option in PROGRAM_OPTIONS:
option_key = PROGRAM_OPTIONS[option][0]
options.append(Option(option_key, value))

method_call: Awaitable[Any]
exception_translation_key: str
if program:
program = (
program
if isinstance(program, ProgramKey)
else TRANSLATION_KEYS_PROGRAMS_MAP[program]
)

if affects_to == AFFECTS_TO_ACTIVE_PROGRAM:
method_call = client.start_program(
ha_id, program_key=program, options=options
)
exception_translation_key = "start_program"
elif affects_to == AFFECTS_TO_SELECTED_PROGRAM:
method_call = client.set_selected_program(
ha_id, program_key=program, options=options
)
exception_translation_key = "select_program"
else:
array_of_options = ArrayOfOptions(options)
if affects_to == AFFECTS_TO_ACTIVE_PROGRAM:
method_call = client.set_active_program_options(
ha_id, array_of_options=array_of_options
)
exception_translation_key = "set_options_active_program"
else:
# affects_to is AFFECTS_TO_SELECTED_PROGRAM
method_call = client.set_selected_program_options(
ha_id, array_of_options=array_of_options
)
exception_translation_key = "set_options_selected_program"

try:
await method_call
except HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key=exception_translation_key,
translation_placeholders={
**get_dict_from_home_connect_error(err),
**(
{SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program}
if program
else {}
),
},
) from err

async def async_service_start_program(call: ServiceCall):
"""Service for starting a program."""
await _async_service_program(call, True)
@ -315,6 +564,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_service_start_program,
schema=SERVICE_PROGRAM_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_SET_PROGRAM_AND_OPTIONS,
async_service_set_program_and_options,
schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA,
)

return True

@ -349,6 +604,7 @@ async def async_unload_entry(
hass: HomeAssistant, entry: HomeConnectConfigEntry
) -> bool:
"""Unload a config entry."""
async_delete_issue(hass, DOMAIN, "deprecated_set_program_and_option_actions")
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@ -1,6 +1,10 @@
"""Constants for the Home Connect integration."""

from aiohomeconnect.model import EventKey, SettingKey, StatusKey
from typing import cast

from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey

from .utils import bsh_key_to_translation_key

DOMAIN = "home_connect"

@ -52,15 +56,18 @@ SERVICE_OPTION_SELECTED = "set_option_selected"
SERVICE_PAUSE_PROGRAM = "pause_program"
SERVICE_RESUME_PROGRAM = "resume_program"
SERVICE_SELECT_PROGRAM = "select_program"
SERVICE_SET_PROGRAM_AND_OPTIONS = "set_program_and_options"
SERVICE_SETTING = "change_setting"
SERVICE_START_PROGRAM = "start_program"

ATTR_AFFECTS_TO = "affects_to"
ATTR_KEY = "key"
ATTR_PROGRAM = "program"
ATTR_UNIT = "unit"
ATTR_VALUE = "value"

AFFECTS_TO_ACTIVE_PROGRAM = "active_program"
AFFECTS_TO_SELECTED_PROGRAM = "selected_program"

SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity"
SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name"
@ -70,6 +77,269 @@ SVE_TRANSLATION_PLACEHOLDER_KEY = "key"
SVE_TRANSLATION_PLACEHOLDER_VALUE = "value"


TRANSLATION_KEYS_PROGRAMS_MAP = {
bsh_key_to_translation_key(program.value): cast(ProgramKey, program)
for program in ProgramKey
if program != ProgramKey.UNKNOWN
}

PROGRAMS_TRANSLATION_KEYS_MAP = {
value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items()
}

REFERENCE_MAP_ID_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.TempMap",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map1",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map2",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map3",
)
}

CLEANING_MODE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Silent",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Standard",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Power",
)
}

BEAN_AMOUNT_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryMild",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Mild",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.MildPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Normal",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.NormalPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Strong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.StrongPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryStrong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryStrongPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.ExtraStrong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShot",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShotPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShotPlusPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.TripleShot",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.TripleShotPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.CoffeeGround",
)
}

COFFEE_TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.88C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.90C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.92C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.94C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.95C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.96C",
)
}

BEAN_CONTAINER_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.BeanContainerSelection.Right",
"ConsumerProducts.CoffeeMaker.EnumType.BeanContainerSelection.Left",
)
}

FLOW_RATE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.Normal",
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.Intense",
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.IntensePlus",
)
}

COFFEE_MILK_RATIO_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.10Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.20Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.25Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.30Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.40Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.50Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.55Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.60Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.65Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.67Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.70Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.75Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.80Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.85Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.90Percent",
)
}

HOT_WATER_TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.WhiteTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.GreenTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.BlackTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.50C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.55C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.60C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.65C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.70C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.75C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.80C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.85C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.90C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.95C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.97C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.122F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.131F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.140F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.149F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.158F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.167F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.176F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.185F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.194F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.203F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.Max",
)
}

DRYING_TARGET_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Dryer.EnumType.DryingTarget.IronDry",
"LaundryCare.Dryer.EnumType.DryingTarget.GentleDry",
"LaundryCare.Dryer.EnumType.DryingTarget.CupboardDry",
"LaundryCare.Dryer.EnumType.DryingTarget.CupboardDryPlus",
"LaundryCare.Dryer.EnumType.DryingTarget.ExtraDry",
)
}

VENTING_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Hood.EnumType.Stage.FanOff",
"Cooking.Hood.EnumType.Stage.FanStage01",
"Cooking.Hood.EnumType.Stage.FanStage02",
"Cooking.Hood.EnumType.Stage.FanStage03",
"Cooking.Hood.EnumType.Stage.FanStage04",
"Cooking.Hood.EnumType.Stage.FanStage05",
)
}

INTENSIVE_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStageOff",
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStage1",
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStage2",
)
}

WARMING_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Oven.EnumType.WarmingLevel.Low",
"Cooking.Oven.EnumType.WarmingLevel.Medium",
"Cooking.Oven.EnumType.WarmingLevel.High",
)
}

TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Washer.EnumType.Temperature.Cold",
"LaundryCare.Washer.EnumType.Temperature.GC20",
"LaundryCare.Washer.EnumType.Temperature.GC30",
"LaundryCare.Washer.EnumType.Temperature.GC40",
"LaundryCare.Washer.EnumType.Temperature.GC50",
"LaundryCare.Washer.EnumType.Temperature.GC60",
"LaundryCare.Washer.EnumType.Temperature.GC70",
"LaundryCare.Washer.EnumType.Temperature.GC80",
"LaundryCare.Washer.EnumType.Temperature.GC90",
"LaundryCare.Washer.EnumType.Temperature.UlCold",
"LaundryCare.Washer.EnumType.Temperature.UlWarm",
"LaundryCare.Washer.EnumType.Temperature.UlHot",
"LaundryCare.Washer.EnumType.Temperature.UlExtraHot",
)
}

SPIN_SPEED_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Washer.EnumType.SpinSpeed.Off",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM400",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM600",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM800",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1000",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1200",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1400",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1600",
"LaundryCare.Washer.EnumType.SpinSpeed.UlOff",
"LaundryCare.Washer.EnumType.SpinSpeed.UlLow",
"LaundryCare.Washer.EnumType.SpinSpeed.UlMedium",
"LaundryCare.Washer.EnumType.SpinSpeed.UlHigh",
)
}

VARIO_PERFECT_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Common.EnumType.VarioPerfect.Off",
"LaundryCare.Common.EnumType.VarioPerfect.EcoPerfect",
"LaundryCare.Common.EnumType.VarioPerfect.SpeedPerfect",
)
}


PROGRAM_ENUM_OPTIONS = {
bsh_key_to_translation_key(option_key): (
option_key,
options,
)
for option_key, options in (
(
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_REFERENCE_MAP_ID,
REFERENCE_MAP_ID_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_CLEANING_MODE,
CLEANING_MODE_OPTIONS,
),
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT, BEAN_AMOUNT_OPTIONS),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_TEMPERATURE,
COFFEE_TEMPERATURE_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_CONTAINER_SELECTION,
BEAN_CONTAINER_OPTIONS,
),
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FLOW_RATE, FLOW_RATE_OPTIONS),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_MILK_RATIO,
COFFEE_MILK_RATIO_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_HOT_WATER_TEMPERATURE,
HOT_WATER_TEMPERATURE_OPTIONS,
),
(OptionKey.LAUNDRY_CARE_DRYER_DRYING_TARGET, DRYING_TARGET_OPTIONS),
(OptionKey.COOKING_COMMON_HOOD_VENTING_LEVEL, VENTING_LEVEL_OPTIONS),
(OptionKey.COOKING_COMMON_HOOD_INTENSIVE_LEVEL, INTENSIVE_LEVEL_OPTIONS),
(OptionKey.COOKING_OVEN_WARMING_LEVEL, WARMING_LEVEL_OPTIONS),
(OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE, TEMPERATURE_OPTIONS),
(OptionKey.LAUNDRY_CARE_WASHER_SPIN_SPEED, SPIN_SPEED_OPTIONS),
(OptionKey.LAUNDRY_CARE_COMMON_VARIO_PERFECT, VARIO_PERFECT_OPTIONS),
)
}


OLD_NEW_UNIQUE_ID_SUFFIX_MAP = {
"ChildLock": SettingKey.BSH_COMMON_CHILD_LOCK,
"Operation State": StatusKey.BSH_COMMON_OPERATION_STATE,
@ -18,6 +18,9 @@
"set_option_selected": {
"service": "mdi:gesture-tap"
},
"set_program_and_options": {
"service": "mdi:form-select"
},
"change_setting": {
"service": "mdi:cog"
}
@ -3,7 +3,7 @@
"name": "Home Connect",
"codeowners": ["@DavidMStraub", "@Diegorro98", "@MartinHjelmare"],
"config_flow": true,
"dependencies": ["application_credentials"],
"dependencies": ["application_credentials", "repairs"],
"documentation": "https://www.home-assistant.io/integrations/home_connect",
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
@ -15,24 +15,20 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .common import setup_home_connect_entry
from .const import APPLIANCES_WITH_PROGRAMS, DOMAIN, SVE_TRANSLATION_PLACEHOLDER_PROGRAM
from .const import (
APPLIANCES_WITH_PROGRAMS,
DOMAIN,
PROGRAMS_TRANSLATION_KEYS_MAP,
SVE_TRANSLATION_PLACEHOLDER_PROGRAM,
TRANSLATION_KEYS_PROGRAMS_MAP,
)
from .coordinator import (
HomeConnectApplianceData,
HomeConnectConfigEntry,
HomeConnectCoordinator,
)
from .entity import HomeConnectEntity
from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error
from .utils import get_dict_from_home_connect_error

TRANSLATION_KEYS_PROGRAMS_MAP = {
bsh_key_to_translation_key(program.value): cast(ProgramKey, program)
for program in ProgramKey
if program != ProgramKey.UNKNOWN
}

PROGRAMS_TRANSLATION_KEYS_MAP = {
value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items()
}


@dataclass(frozen=True, kw_only=True)
@ -46,6 +46,558 @@ select_program:
      example: "seconds"
      selector:
        text:
set_program_and_options:
  fields:
    device_id:
      required: true
      selector:
        device:
          integration: home_connect
    affects_to:
      example: active_program
      required: true
      selector:
        select:
          translation_key: affects_to
          options:
            - active_program
            - selected_program
    program:
      example: dishcare_dishwasher_program_auto2
      required: true
      selector:
        select:
          mode: dropdown
          custom_value: false
          translation_key: programs
          options:
            - consumer_products_cleaning_robot_program_cleaning_clean_all
            - consumer_products_cleaning_robot_program_cleaning_clean_map
            - consumer_products_cleaning_robot_program_basic_go_home
            - consumer_products_coffee_maker_program_beverage_ristretto
            - consumer_products_coffee_maker_program_beverage_espresso
            - consumer_products_coffee_maker_program_beverage_espresso_doppio
            - consumer_products_coffee_maker_program_beverage_coffee
            - consumer_products_coffee_maker_program_beverage_x_l_coffee
            - consumer_products_coffee_maker_program_beverage_caffe_grande
            - consumer_products_coffee_maker_program_beverage_espresso_macchiato
            - consumer_products_coffee_maker_program_beverage_cappuccino
            - consumer_products_coffee_maker_program_beverage_latte_macchiato
            - consumer_products_coffee_maker_program_beverage_caffe_latte
            - consumer_products_coffee_maker_program_beverage_milk_froth
            - consumer_products_coffee_maker_program_beverage_warm_milk
            - consumer_products_coffee_maker_program_coffee_world_kleiner_brauner
            - consumer_products_coffee_maker_program_coffee_world_grosser_brauner
            - consumer_products_coffee_maker_program_coffee_world_verlaengerter
            - consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun
            - consumer_products_coffee_maker_program_coffee_world_wiener_melange
            - consumer_products_coffee_maker_program_coffee_world_flat_white
            - consumer_products_coffee_maker_program_coffee_world_cortado
            - consumer_products_coffee_maker_program_coffee_world_cafe_cortado
            - consumer_products_coffee_maker_program_coffee_world_cafe_con_leche
            - consumer_products_coffee_maker_program_coffee_world_cafe_au_lait
            - consumer_products_coffee_maker_program_coffee_world_doppio
            - consumer_products_coffee_maker_program_coffee_world_kaapi
            - consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd
            - consumer_products_coffee_maker_program_coffee_world_galao
            - consumer_products_coffee_maker_program_coffee_world_garoto
            - consumer_products_coffee_maker_program_coffee_world_americano
            - consumer_products_coffee_maker_program_coffee_world_red_eye
            - consumer_products_coffee_maker_program_coffee_world_black_eye
            - consumer_products_coffee_maker_program_coffee_world_dead_eye
            - consumer_products_coffee_maker_program_beverage_hot_water
            - dishcare_dishwasher_program_pre_rinse
            - dishcare_dishwasher_program_auto_1
            - dishcare_dishwasher_program_auto_2
            - dishcare_dishwasher_program_auto_3
            - dishcare_dishwasher_program_eco_50
            - dishcare_dishwasher_program_quick_45
            - dishcare_dishwasher_program_intensiv_70
            - dishcare_dishwasher_program_normal_65
            - dishcare_dishwasher_program_glas_40
            - dishcare_dishwasher_program_glass_care
            - dishcare_dishwasher_program_night_wash
            - dishcare_dishwasher_program_quick_65
            - dishcare_dishwasher_program_normal_45
            - dishcare_dishwasher_program_intensiv_45
            - dishcare_dishwasher_program_auto_half_load
            - dishcare_dishwasher_program_intensiv_power
            - dishcare_dishwasher_program_magic_daily
            - dishcare_dishwasher_program_super_60
            - dishcare_dishwasher_program_kurz_60
            - dishcare_dishwasher_program_express_sparkle_65
            - dishcare_dishwasher_program_machine_care
            - dishcare_dishwasher_program_steam_fresh
            - dishcare_dishwasher_program_maximum_cleaning
            - dishcare_dishwasher_program_mixed_load
            - laundry_care_dryer_program_cotton
            - laundry_care_dryer_program_synthetic
            - laundry_care_dryer_program_mix
            - laundry_care_dryer_program_blankets
            - laundry_care_dryer_program_business_shirts
            - laundry_care_dryer_program_down_feathers
            - laundry_care_dryer_program_hygiene
            - laundry_care_dryer_program_jeans
            - laundry_care_dryer_program_outdoor
            - laundry_care_dryer_program_synthetic_refresh
            - laundry_care_dryer_program_towels
            - laundry_care_dryer_program_delicates
            - laundry_care_dryer_program_super_40
            - laundry_care_dryer_program_shirts_15
            - laundry_care_dryer_program_pillow
            - laundry_care_dryer_program_anti_shrink
            - laundry_care_dryer_program_my_time_my_drying_time
            - laundry_care_dryer_program_time_cold
            - laundry_care_dryer_program_time_warm
            - laundry_care_dryer_program_in_basket
            - laundry_care_dryer_program_time_cold_fix_time_cold_20
            - laundry_care_dryer_program_time_cold_fix_time_cold_30
            - laundry_care_dryer_program_time_cold_fix_time_cold_60
            - laundry_care_dryer_program_time_warm_fix_time_warm_30
            - laundry_care_dryer_program_time_warm_fix_time_warm_40
            - laundry_care_dryer_program_time_warm_fix_time_warm_60
            - laundry_care_dryer_program_dessous
            - cooking_common_program_hood_automatic
            - cooking_common_program_hood_venting
            - cooking_common_program_hood_delayed_shut_off
            - cooking_oven_program_heating_mode_pre_heating
            - cooking_oven_program_heating_mode_hot_air
            - cooking_oven_program_heating_mode_hot_air_eco
            - cooking_oven_program_heating_mode_hot_air_grilling
            - cooking_oven_program_heating_mode_top_bottom_heating
            - cooking_oven_program_heating_mode_top_bottom_heating_eco
            - cooking_oven_program_heating_mode_bottom_heating
            - cooking_oven_program_heating_mode_pizza_setting
            - cooking_oven_program_heating_mode_slow_cook
            - cooking_oven_program_heating_mode_intensive_heat
            - cooking_oven_program_heating_mode_keep_warm
            - cooking_oven_program_heating_mode_preheat_ovenware
            - cooking_oven_program_heating_mode_frozen_heatup_special
            - cooking_oven_program_heating_mode_desiccation
            - cooking_oven_program_heating_mode_defrost
            - cooking_oven_program_heating_mode_proof
            - cooking_oven_program_heating_mode_hot_air_30_steam
            - cooking_oven_program_heating_mode_hot_air_60_steam
            - cooking_oven_program_heating_mode_hot_air_80_steam
            - cooking_oven_program_heating_mode_hot_air_100_steam
            - cooking_oven_program_heating_mode_sabbath_programme
            - cooking_oven_program_microwave_90_watt
            - cooking_oven_program_microwave_180_watt
            - cooking_oven_program_microwave_360_watt
            - cooking_oven_program_microwave_600_watt
            - cooking_oven_program_microwave_900_watt
            - cooking_oven_program_microwave_1000_watt
            - cooking_oven_program_microwave_max
            - cooking_oven_program_heating_mode_warming_drawer
            - laundry_care_washer_program_cotton
            - laundry_care_washer_program_cotton_cotton_eco
            - laundry_care_washer_program_cotton_eco_4060
            - laundry_care_washer_program_cotton_colour
            - laundry_care_washer_program_easy_care
            - laundry_care_washer_program_mix
            - laundry_care_washer_program_mix_night_wash
            - laundry_care_washer_program_delicates_silk
            - laundry_care_washer_program_wool
            - laundry_care_washer_program_sensitive
            - laundry_care_washer_program_auto_30
            - laundry_care_washer_program_auto_40
            - laundry_care_washer_program_auto_60
            - laundry_care_washer_program_chiffon
            - laundry_care_washer_program_curtains
            - laundry_care_washer_program_dark_wash
            - laundry_care_washer_program_dessous
            - laundry_care_washer_program_monsoon
            - laundry_care_washer_program_outdoor
            - laundry_care_washer_program_plush_toy
            - laundry_care_washer_program_shirts_blouses
            - laundry_care_washer_program_sport_fitness
            - laundry_care_washer_program_towels
            - laundry_care_washer_program_water_proof
            - laundry_care_washer_program_power_speed_59
            - laundry_care_washer_program_super_153045_super_15
            - laundry_care_washer_program_super_153045_super_1530
            - laundry_care_washer_program_down_duvet_duvet
            - laundry_care_washer_program_rinse_rinse_spin_drain
            - laundry_care_washer_program_drum_clean
            - laundry_care_washer_dryer_program_cotton
            - laundry_care_washer_dryer_program_cotton_eco_4060
            - laundry_care_washer_dryer_program_mix
            - laundry_care_washer_dryer_program_easy_care
            - laundry_care_washer_dryer_program_wash_and_dry_60
            - laundry_care_washer_dryer_program_wash_and_dry_90
    cleaning_robot_options:
      collapsed: true
      fields:
        consumer_products_cleaning_robot_option_reference_map_id:
          example: consumer_products_cleaning_robot_enum_type_available_maps_map1
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: available_maps
              options:
                - consumer_products_cleaning_robot_enum_type_available_maps_temp_map
                - consumer_products_cleaning_robot_enum_type_available_maps_map1
                - consumer_products_cleaning_robot_enum_type_available_maps_map2
                - consumer_products_cleaning_robot_enum_type_available_maps_map3
        consumer_products_cleaning_robot_option_cleaning_mode:
          example: consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: cleaning_mode
              options:
                - consumer_products_cleaning_robot_enum_type_cleaning_modes_silent
                - consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
                - consumer_products_cleaning_robot_enum_type_cleaning_modes_power
    coffee_maker_options:
      collapsed: true
      fields:
        consumer_products_coffee_maker_option_bean_amount:
          example: consumer_products_coffee_maker_enum_type_bean_amount_normal
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: bean_amount
              options:
                - consumer_products_coffee_maker_enum_type_bean_amount_very_mild
                - consumer_products_coffee_maker_enum_type_bean_amount_mild
                - consumer_products_coffee_maker_enum_type_bean_amount_mild_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_normal
                - consumer_products_coffee_maker_enum_type_bean_amount_normal_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_strong
                - consumer_products_coffee_maker_enum_type_bean_amount_strong_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_very_strong
                - consumer_products_coffee_maker_enum_type_bean_amount_very_strong_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_extra_strong
                - consumer_products_coffee_maker_enum_type_bean_amount_double_shot
                - consumer_products_coffee_maker_enum_type_bean_amount_double_shot_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_double_shot_plus_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_triple_shot
                - consumer_products_coffee_maker_enum_type_bean_amount_triple_shot_plus
                - consumer_products_coffee_maker_enum_type_bean_amount_coffee_ground
        consumer_products_coffee_maker_option_fill_quantity:
          example: 60
          required: false
          selector:
            number:
              min: 0
              step: 1
              mode: box
              unit_of_measurement: ml
        consumer_products_coffee_maker_option_coffee_temperature:
          example: consumer_products_coffee_maker_enum_type_coffee_temperature_88_c
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: coffee_temperature
              options:
                - consumer_products_coffee_maker_enum_type_coffee_temperature_88_c
                - consumer_products_coffee_maker_enum_type_coffee_temperature_90_c
                - consumer_products_coffee_maker_enum_type_coffee_temperature_92_c
                - consumer_products_coffee_maker_enum_type_coffee_temperature_94_c
                - consumer_products_coffee_maker_enum_type_coffee_temperature_95_c
                - consumer_products_coffee_maker_enum_type_coffee_temperature_96_c
        consumer_products_coffee_maker_option_bean_container:
          example: consumer_products_coffee_maker_enum_type_bean_container_selection_right
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: bean_container
              options:
                - consumer_products_coffee_maker_enum_type_bean_container_selection_right
                - consumer_products_coffee_maker_enum_type_bean_container_selection_left
        consumer_products_coffee_maker_option_flow_rate:
          example: consumer_products_coffee_maker_enum_type_flow_rate_normal
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: flow_rate
              options:
                - consumer_products_coffee_maker_enum_type_flow_rate_normal
                - consumer_products_coffee_maker_enum_type_flow_rate_intense
                - consumer_products_coffee_maker_enum_type_flow_rate_intense_plus
        consumer_products_coffee_maker_option_multiple_beverages:
          example: false
          required: false
          selector:
            boolean:
        consumer_products_coffee_maker_option_coffee_milk_ratio:
          example: consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: coffee_milk_ratio
              options:
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_10_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_20_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_25_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_30_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_40_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_55_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_60_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_65_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_67_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_70_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_75_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_80_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_85_percent
                - consumer_products_coffee_maker_enum_type_coffee_milk_ratio_90_percent
        consumer_products_coffee_maker_option_hot_water_temperature:
          example: consumer_products_coffee_maker_enum_type_hot_water_temperature_50_c
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: hot_water_temperature
              options:
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_white_tea
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_green_tea
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_black_tea
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_50_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_55_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_60_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_65_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_70_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_75_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_80_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_85_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_90_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_95_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_97_c
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_122_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_131_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_140_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_149_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_158_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_167_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_176_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_185_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_194_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_203_f
                - consumer_products_coffee_maker_enum_type_hot_water_temperature_max
    dish_washer_options:
      collapsed: true
      fields:
        b_s_h_common_option_start_in_relative:
          example: 3600
          required: false
          selector:
            number:
              min: 0
              step: 1
              mode: box
              unit_of_measurement: s
        dishcare_dishwasher_option_intensiv_zone:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_brilliance_dry:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_vario_speed_plus:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_silence_on_demand:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_half_load:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_extra_dry:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_hygiene_plus:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_eco_dry:
          example: false
          required: false
          selector:
            boolean:
        dishcare_dishwasher_option_zeolite_dry:
          example: false
          required: false
          selector:
            boolean:
    dryer_options:
      collapsed: true
      fields:
        laundry_care_dryer_option_drying_target:
          example: laundry_care_dryer_enum_type_drying_target_iron_dry
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: drying_target
              options:
                - laundry_care_dryer_enum_type_drying_target_iron_dry
                - laundry_care_dryer_enum_type_drying_target_gentle_dry
                - laundry_care_dryer_enum_type_drying_target_cupboard_dry
                - laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus
                - laundry_care_dryer_enum_type_drying_target_extra_dry
    hood_options:
      collapsed: true
      fields:
        cooking_hood_option_venting_level:
          example: cooking_hood_enum_type_stage_fan_stage01
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: venting_level
              options:
                - cooking_hood_enum_type_stage_fan_off
                - cooking_hood_enum_type_stage_fan_stage01
                - cooking_hood_enum_type_stage_fan_stage02
                - cooking_hood_enum_type_stage_fan_stage03
                - cooking_hood_enum_type_stage_fan_stage04
                - cooking_hood_enum_type_stage_fan_stage05
        cooking_hood_option_intensive_level:
          example: cooking_hood_enum_type_intensive_stage_intensive_stage1
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: intensive_level
              options:
                - cooking_hood_enum_type_intensive_stage_intensive_stage_off
                - cooking_hood_enum_type_intensive_stage_intensive_stage1
                - cooking_hood_enum_type_intensive_stage_intensive_stage2
    oven_options:
      collapsed: true
      fields:
        cooking_oven_option_setpoint_temperature:
          example: 180
          required: false
          selector:
            number:
              min: 0
              step: 1
              mode: box
              unit_of_measurement: °C/°F
        b_s_h_common_option_duration:
          example: 900
          required: false
          selector:
            number:
              min: 0
              step: 1
              mode: box
              unit_of_measurement: s
        cooking_oven_option_fast_pre_heat:
          example: false
          required: false
          selector:
            boolean:
    warming_drawer_options:
      collapsed: true
      fields:
        cooking_oven_option_warming_level:
          example: cooking_oven_enum_type_warming_level_medium
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: warming_level
              options:
                - cooking_oven_enum_type_warming_level_low
                - cooking_oven_enum_type_warming_level_medium
                - cooking_oven_enum_type_warming_level_high
    washer_options:
      collapsed: true
      fields:
        laundry_care_washer_option_temperature:
          example: laundry_care_washer_enum_type_temperature_g_c40
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: washer_temperature
              options:
                - laundry_care_washer_enum_type_temperature_cold
                - laundry_care_washer_enum_type_temperature_g_c20
                - laundry_care_washer_enum_type_temperature_g_c30
                - laundry_care_washer_enum_type_temperature_g_c40
                - laundry_care_washer_enum_type_temperature_g_c50
                - laundry_care_washer_enum_type_temperature_g_c60
                - laundry_care_washer_enum_type_temperature_g_c70
                - laundry_care_washer_enum_type_temperature_g_c80
                - laundry_care_washer_enum_type_temperature_g_c90
                - laundry_care_washer_enum_type_temperature_ul_cold
                - laundry_care_washer_enum_type_temperature_ul_warm
                - laundry_care_washer_enum_type_temperature_ul_hot
                - laundry_care_washer_enum_type_temperature_ul_extra_hot
        laundry_care_washer_option_spin_speed:
          example: laundry_care_washer_enum_type_spin_speed_r_p_m800
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: spin_speed
              options:
                - laundry_care_washer_enum_type_spin_speed_off
                - laundry_care_washer_enum_type_spin_speed_r_p_m400
                - laundry_care_washer_enum_type_spin_speed_r_p_m600
                - laundry_care_washer_enum_type_spin_speed_r_p_m800
                - laundry_care_washer_enum_type_spin_speed_r_p_m1000
                - laundry_care_washer_enum_type_spin_speed_r_p_m1200
                - laundry_care_washer_enum_type_spin_speed_r_p_m1400
                - laundry_care_washer_enum_type_spin_speed_r_p_m1600
                - laundry_care_washer_enum_type_spin_speed_ul_off
                - laundry_care_washer_enum_type_spin_speed_ul_low
                - laundry_care_washer_enum_type_spin_speed_ul_medium
                - laundry_care_washer_enum_type_spin_speed_ul_high
        b_s_h_common_option_finish_in_relative:
          example: 3600
          required: false
          selector:
            number:
              min: 0
              step: 1
              mode: box
              unit_of_measurement: s
        laundry_care_washer_option_i_dos1_active:
          example: false
          required: false
          selector:
            boolean:
        laundry_care_washer_option_i_dos2_active:
          example: false
          required: false
          selector:
            boolean:
        laundry_care_washer_option_vario_perfect:
          example: laundry_care_common_enum_type_vario_perfect_eco_perfect
          required: false
          selector:
            select:
              mode: dropdown
              translation_key: vario_perfect
              options:
                - laundry_care_common_enum_type_vario_perfect_off
                - laundry_care_common_enum_type_vario_perfect_eco_perfect
                - laundry_care_common_enum_type_vario_perfect_speed_perfect
pause_program:
  fields:
    device_id:
File diff suppressed because it is too large
@ -12,7 +12,7 @@ import logging
from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType
from universal_silabs_flasher.flasher import Flasher

from homeassistant.components.hassio import AddonError, AddonState
from homeassistant.components.hassio import AddonError, AddonManager, AddonState
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.hassio import is_hassio
@ -143,6 +143,31 @@ class FirmwareInfo:
        return all(states)


async def get_otbr_addon_firmware_info(
    hass: HomeAssistant, otbr_addon_manager: AddonManager
) -> FirmwareInfo | None:
    """Get firmware info from the OTBR add-on."""
    try:
        otbr_addon_info = await otbr_addon_manager.async_get_addon_info()
    except AddonError:
        return None

    if otbr_addon_info.state == AddonState.NOT_INSTALLED:
        return None

    if (otbr_path := otbr_addon_info.options.get("device")) is None:
        return None

    # Only create a new entry if there are no existing OTBR ones
    return FirmwareInfo(
        device=otbr_path,
        firmware_type=ApplicationType.SPINEL,
        firmware_version=None,
        source="otbr",
        owners=[OwningAddon(slug=otbr_addon_manager.addon_slug)],
    )


async def guess_hardware_owners(
    hass: HomeAssistant, device_path: str
) -> list[FirmwareInfo]:
@ -155,28 +180,19 @@ async def guess_hardware_owners(
    # It may be possible for the OTBR addon to be present without the integration
    if is_hassio(hass):
        otbr_addon_manager = get_otbr_addon_manager(hass)
        try:
            otbr_addon_info = await otbr_addon_manager.async_get_addon_info()
        except AddonError:
            pass
        else:
            if otbr_addon_info.state != AddonState.NOT_INSTALLED:
                otbr_path = otbr_addon_info.options.get("device")

                # Only create a new entry if there are no existing OTBR ones
                if otbr_path is not None and not any(
                    info.source == "otbr" for info in device_guesses[otbr_path]
                ):
                    device_guesses[otbr_path].append(
                        FirmwareInfo(
                            device=otbr_path,
                            firmware_type=ApplicationType.SPINEL,
                            firmware_version=None,
                            source="otbr",
                            owners=[OwningAddon(slug=otbr_addon_manager.addon_slug)],
                        )
                    )
        otbr_addon_fw_info = await get_otbr_addon_firmware_info(
            hass, otbr_addon_manager
        )
        otbr_path = (
            otbr_addon_fw_info.device if otbr_addon_fw_info is not None else None
        )

        # Only create a new entry if there are no existing OTBR ones
        if otbr_path is not None and not any(
            info.source == "otbr" for info in device_guesses[otbr_path]
        ):
            assert otbr_addon_fw_info is not None
            device_guesses[otbr_path].append(otbr_addon_fw_info)

    if is_hassio(hass):
        multipan_addon_manager = await get_multiprotocol_addon_manager(hass)
@ -62,7 +62,7 @@ class HMDevice(Entity):
        if self._state:
            self._state = self._state.upper()

    async def async_added_to_hass(self):
    async def async_added_to_hass(self) -> None:
        """Load data init callbacks."""
        self._subscribe_homematic_events()

@ -77,7 +77,7 @@ class HMDevice(Entity):
        return self._name

    @property
    def available(self):
    def available(self) -> bool:
        """Return true if device is available."""
        return self._available

@ -23,8 +23,10 @@ import voluptuous as vol
from homeassistant.components import onboarding
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_IP_ADDRESS, CONF_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import instance_id
from homeassistant.helpers.selector import TextSelector
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
@ -88,7 +90,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):

        # Tell device we want a token, user must now press the button within 30 seconds
        # The first attempt will always fail, but this opens the window to press the button
        token = await async_request_token(self.ip_address)
        token = await async_request_token(self.hass, self.ip_address)
        errors: dict[str, str] | None = None

        if token is None:
@ -250,7 +252,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):

        errors: dict[str, str] | None = None

        token = await async_request_token(self.ip_address)
        token = await async_request_token(self.hass, self.ip_address)

        if user_input is not None:
            if token is None:
@ -353,7 +355,7 @@ async def async_try_connect(ip_address: str, token: str | None = None) -> Device
        await energy_api.close()


async def async_request_token(ip_address: str) -> str | None:
async def async_request_token(hass: HomeAssistant, ip_address: str) -> str | None:
    """Try to request a token from the device.

    This method is used to request a token from the device,
@ -362,8 +364,12 @@ async def async_request_token(ip_address: str) -> str | None:

    api = HomeWizardEnergyV2(ip_address)

    # Get a part of the unique id to make the token unique
    # This is to prevent token conflicts when multiple HA instances are used
    uuid = await instance_id.async_get(hass)

    try:
        return await api.get_token("home-assistant")
        return await api.get_token(f"home-assistant#{uuid[:6]}")
    except DisabledError:
        return None
    finally:
@ -47,7 +47,7 @@ class MigrateToV2ApiRepairFlow(RepairsFlow):

        # Tell device we want a token, user must now press the button within 30 seconds
        # The first attempt will always fail, but this opens the window to press the button
        token = await async_request_token(ip_address)
        token = await async_request_token(self.hass, ip_address)
        errors: dict[str, str] | None = None

        if token is None:
@ -54,7 +54,7 @@ class IHCEntity(Entity):
        self.ihc_note = ""
        self.ihc_position = ""

    async def async_added_to_hass(self):
    async def async_added_to_hass(self) -> None:
        """Add callback for IHC changes."""
        _LOGGER.debug("Adding IHC entity notify event: %s", self.ihc_id)
        self.ihc_controller.add_notify_event(self.ihc_id, self.on_ihc_change, True)
@ -109,7 +109,7 @@ class InsteonEntity(Entity):
        )
        self.async_write_ha_state()

    async def async_added_to_hass(self):
    async def async_added_to_hass(self) -> None:
        """Register INSTEON update events."""
        _LOGGER.debug(
            "Tracking updates for device %s group %d name %s",
@ -137,7 +137,7 @@ class InsteonEntity(Entity):
            )
        )

    async def async_will_remove_from_hass(self):
    async def async_will_remove_from_hass(self) -> None:
        """Unsubscribe to INSTEON update events."""
        _LOGGER.debug(
            "Remove tracking updates for device %s group %d name %s",
@ -106,7 +106,7 @@ class ISYNodeEntity(ISYEntity):
        return getattr(self._node, TAG_ENABLED, True)

    @property
    def extra_state_attributes(self) -> dict:
    def extra_state_attributes(self) -> dict[str, Any]:
        """Get the state attributes for the device.

        The 'aux_properties' in the pyisy Node class are combined with the
@ -189,7 +189,7 @@ class ISYProgramEntity(ISYEntity):
        self._actions = actions

    @property
    def extra_state_attributes(self) -> dict:
    def extra_state_attributes(self) -> dict[str, Any]:
        """Get the state attributes for the device."""
        attr = {}
        if self._actions:
@ -58,7 +58,7 @@
  "services": {
    "send_raw_node_command": {
      "name": "Send raw node command",
      "description": "[%key:component::isy994::options::step::init::description%]",
      "description": "Sends a “raw” (e.g., DON, DOF) ISY REST device command to a node using its Home Assistant entity ID. This is useful for devices that aren’t fully supported in Home Assistant yet, such as controls for many NodeServer nodes.",
      "fields": {
        "command": {
          "name": "Command",
@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import SCAN_INTERVAL
from .const import DOMAIN, SCAN_INTERVAL

_LOGGER = logging.getLogger(__name__)

@ -75,16 +75,28 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
        try:
            # Fetch last hour of data
            for sensor in self.devices:
                sensor.data = (
                    await self.api.get_sensor_status(
                        sensor=sensor,
                        tz=self.hass.config.time_zone,
                    )
                )["data"]["current"]
                _LOGGER.debug("Got data: %s", sensor.data)
                data = await self.api.get_sensor_status(
                    sensor=sensor,
                    tz=self.hass.config.time_zone,
                )
                _LOGGER.debug("Got data: %s", data)

                if data_error := data.get("error"):
                    if data_error == "no_readings":
                        sensor.data = None
                        _LOGGER.debug("No readings for %s", sensor.name)
                        continue
                    _LOGGER.debug("Error: %s", data_error)
                    raise UpdateFailed(
                        translation_domain=DOMAIN, translation_key="update_error"
                    )

                sensor.data = data["data"]["current"]

        except HTTPError as error:
            raise UpdateFailed from error
            raise UpdateFailed(
                translation_domain=DOMAIN, translation_key="update_error"
            ) from error

        # Verify that we have permission to read the sensors
        for sensor in self.devices:
@ -64,6 +64,7 @@ SENSOR_DESCRIPTIONS = {
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=get_value,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        suggested_display_precision=2,
    ),
    "Humidity": LaCrosseSensorEntityDescription(
        key="Humidity",
@ -71,6 +72,7 @@ SENSOR_DESCRIPTIONS = {
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=get_value,
        native_unit_of_measurement=PERCENTAGE,
        suggested_display_precision=2,
    ),
    "HeatIndex": LaCrosseSensorEntityDescription(
        key="HeatIndex",
@ -79,6 +81,7 @@ SENSOR_DESCRIPTIONS = {
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=get_value,
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        suggested_display_precision=2,
    ),
    "WindSpeed": LaCrosseSensorEntityDescription(
        key="WindSpeed",
@ -86,6 +89,7 @@ SENSOR_DESCRIPTIONS = {
        value_fn=get_value,
        native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
        device_class=SensorDeviceClass.WIND_SPEED,
        suggested_display_precision=2,
    ),
    "Rain": LaCrosseSensorEntityDescription(
        key="Rain",
@ -93,12 +97,14 @@ SENSOR_DESCRIPTIONS = {
        value_fn=get_value,
        native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
        device_class=SensorDeviceClass.PRECIPITATION,
        suggested_display_precision=2,
    ),
    "WindHeading": LaCrosseSensorEntityDescription(
        key="WindHeading",
        translation_key="wind_heading",
        value_fn=get_value,
        native_unit_of_measurement=DEGREE,
        suggested_display_precision=2,
    ),
    "WetDry": LaCrosseSensorEntityDescription(
        key="WetDry",
@ -117,6 +123,7 @@ SENSOR_DESCRIPTIONS = {
        value_fn=get_value,
        device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
        native_unit_of_measurement=UnitOfPressure.HPA,
        suggested_display_precision=2,
    ),
    "FeelsLike": LaCrosseSensorEntityDescription(
        key="FeelsLike",
@ -125,6 +132,7 @@ SENSOR_DESCRIPTIONS = {
        value_fn=get_value,
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        suggested_display_precision=2,
    ),
    "WindChill": LaCrosseSensorEntityDescription(
        key="WindChill",
@ -133,6 +141,7 @@ SENSOR_DESCRIPTIONS = {
        value_fn=get_value,
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        suggested_display_precision=2,
    ),
}
# map of API returned unit of measurement strings to their corresponding unit of measurement
@ -42,5 +42,10 @@
      "name": "Wind chill"
    }
  }
  },
  "exceptions": {
    "update_error": {
      "message": "Error updating data"
    }
  }
}
@ -22,7 +22,12 @@ from .const import (
)
from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator

PLATFORMS: list[Platform] = [Platform.SWITCH, Platform.TIME]
PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.SENSOR,
    Platform.SWITCH,
    Platform.TIME,
]


async def async_setup_entry(hass: HomeAssistant, entry: LetPotConfigEntry) -> bool:
122
homeassistant/components/letpot/binary_sensor.py
Normal file
@ -0,0 +1,122 @@
"""Support for LetPot binary sensor entities."""

from collections.abc import Callable
from dataclasses import dataclass

from letpot.models import DeviceFeature, LetPotDeviceStatus

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
from .entity import LetPotEntity, LetPotEntityDescription

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class LetPotBinarySensorEntityDescription(
    LetPotEntityDescription, BinarySensorEntityDescription
):
    """Describes a LetPot binary sensor entity."""

    is_on_fn: Callable[[LetPotDeviceStatus], bool]


BINARY_SENSORS: tuple[LetPotBinarySensorEntityDescription, ...] = (
    LetPotBinarySensorEntityDescription(
        key="low_nutrients",
        translation_key="low_nutrients",
        is_on_fn=lambda status: bool(status.errors.low_nutrients),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
        device_class=BinarySensorDeviceClass.PROBLEM,
        supported_fn=(
            lambda coordinator: coordinator.data.errors.low_nutrients is not None
        ),
    ),
    LetPotBinarySensorEntityDescription(
        key="low_water",
        translation_key="low_water",
        is_on_fn=lambda status: bool(status.errors.low_water),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
        device_class=BinarySensorDeviceClass.PROBLEM,
        supported_fn=lambda coordinator: coordinator.data.errors.low_water is not None,
    ),
    LetPotBinarySensorEntityDescription(
        key="pump",
        translation_key="pump",
        is_on_fn=lambda status: status.pump_status == 1,
        device_class=BinarySensorDeviceClass.RUNNING,
        supported_fn=(
            lambda coordinator: DeviceFeature.PUMP_STATUS
            in coordinator.device_client.device_features
        ),
    ),
    LetPotBinarySensorEntityDescription(
        key="pump_error",
        translation_key="pump_error",
        is_on_fn=lambda status: bool(status.errors.pump_malfunction),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
        device_class=BinarySensorDeviceClass.PROBLEM,
        supported_fn=(
            lambda coordinator: coordinator.data.errors.pump_malfunction is not None
        ),
    ),
    LetPotBinarySensorEntityDescription(
        key="refill_error",
        translation_key="refill_error",
        is_on_fn=lambda status: bool(status.errors.refill_error),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
        device_class=BinarySensorDeviceClass.PROBLEM,
        supported_fn=(
            lambda coordinator: coordinator.data.errors.refill_error is not None
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: LetPotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up LetPot binary sensor entities based on a config entry and device status/features."""
    coordinators = entry.runtime_data
    async_add_entities(
        LetPotBinarySensorEntity(coordinator, description)
        for description in BINARY_SENSORS
        for coordinator in coordinators
        if description.supported_fn(coordinator)
    )


class LetPotBinarySensorEntity(LetPotEntity, BinarySensorEntity):
    """Defines a LetPot binary sensor entity."""

    entity_description: LetPotBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: LetPotDeviceCoordinator,
        description: LetPotBinarySensorEntityDescription,
    ) -> None:
        """Initialize LetPot binary sensor entity."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{coordinator.device.serial_number}_{description.key}"

    @property
    def is_on(self) -> bool:
        """Return if the binary sensor is on."""
        return self.entity_description.is_on_fn(self.coordinator.data)
@ -1,18 +1,27 @@
"""Base class for LetPot entities."""

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any, Concatenate

from letpot.exceptions import LetPotConnectionException, LetPotException

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import LetPotDeviceCoordinator


@dataclass(frozen=True, kw_only=True)
class LetPotEntityDescription(EntityDescription):
    """Description for all LetPot entities."""

    supported_fn: Callable[[LetPotDeviceCoordinator], bool] = lambda _: True


class LetPotEntity(CoordinatorEntity[LetPotDeviceCoordinator]):
    """Defines a base LetPot entity."""

@ -1,5 +1,30 @@
{
  "entity": {
    "binary_sensor": {
      "low_nutrients": {
        "default": "mdi:beaker-alert",
        "state": {
          "off": "mdi:beaker"
        }
      },
      "low_water": {
        "default": "mdi:water-percent-alert",
        "state": {
          "off": "mdi:water-percent"
        }
      },
      "pump": {
        "default": "mdi:pump",
        "state": {
          "off": "mdi:pump-off"
        }
      }
    },
    "sensor": {
      "water_level": {
        "default": "mdi:water-percent"
      }
    },
    "switch": {
      "alarm_sound": {
        "default": "mdi:bell-ring",
Some files were not shown because too many files have changed in this diff