Jonh Sady 2025-02-18 13:58:26 -03:00
commit 14c8a01f96
392 changed files with 13441 additions and 2604 deletions

Binary image file changed (65 KiB → 99 KiB).

Dockerfile (generated)
View File

@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU
# Install uv
RUN pip3 install uv==0.5.27
RUN pip3 install uv==0.6.0
WORKDIR /usr/src

View File

@ -134,14 +134,12 @@ DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1
STAGE_0_SUBSTAGE_TIMEOUT = 60
STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60
DEBUGGER_INTEGRATIONS = {"debugpy"}
# Core integrations are unconditionally loaded
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
@ -152,6 +150,10 @@ LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
"isal",
# Set log levels
"logger",
# Ensure network config is available
# before hassio or any other integration is
# loaded that might create an aiohttp client session
"network",
# Error logging
"system_log",
"sentry",
@ -172,12 +174,27 @@ FRONTEND_INTEGRATIONS = {
# add it here.
"backup",
}
RECORDER_INTEGRATIONS = {
# Setup after frontend
# To record data
"recorder",
}
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
# The substage containing recorder should have no timeout, as it could cancel a database migration.
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
# The substages preceding it should also have no timeout, until we ensure that the recorder
# is not accidentally promoted as a dependency of any of the integrations in them.
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS, None),
# Setup recorder
("recorder", {"recorder"}, None),
# Start up debuggers. Start these first in case they want to wait.
("debugger", {"debugpy"}, STAGE_0_SUBSTAGE_TIMEOUT),
# Zeroconf is used for mdns resolution in aiohttp client helper.
("zeroconf", {"zeroconf"}, STAGE_0_SUBSTAGE_TIMEOUT),
)
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb")
# Stage 1 integrations are not to be preimported in bootstrap.
STAGE_1_INTEGRATIONS = {
# We need to make sure discovery integrations
# update their deps before stage 2 integrations
@ -189,9 +206,8 @@ STAGE_1_INTEGRATIONS = {
"mqtt_eventstream",
# To provide account link implementations
"cloud",
# Ensure supervisor is available
"hassio",
}
DEFAULT_INTEGRATIONS = {
# These integrations are set up unless recovery mode is activated.
#
@ -232,22 +248,12 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
# These integrations are set up if using the Supervisor
"hassio",
}
CRITICAL_INTEGRATIONS = {
# Recovery mode is activated if these integrations fail to set up
"frontend",
}
SETUP_ORDER = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS),
# Setup recorder
("recorder", RECORDER_INTEGRATIONS),
# Start up debuggers. Start these first in case they want to wait.
("debugger", DEBUGGER_INTEGRATIONS),
)
#
# Storage keys we are likely to load during startup
# in order of when we expect to load them.
@ -694,7 +700,6 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
return deps_dir
@core.callback
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# Filter out the repeating and common config section [homeassistant]
@ -890,69 +895,48 @@ async def _async_set_up_integrations(
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
hass, config
)
stage_2_domains = domains_to_setup.copy()
# Initialize recorder
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)
pre_stage_domains = [
(name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group & domains_to_setup, timeout)
for name, domain_group, timeout in STAGE_0_INTEGRATIONS
),
("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
]
# calculate what components to setup in what stage
stage_1_domains: set[str] = set()
_LOGGER.info("Setting up stage 0 and 1")
for name, domain_group, timeout in stage_0_and_1_domains:
if not domain_group:
continue
# Find all dependencies of any dependency of any stage 1 integration that
# we plan on loading and promote them to stage 1. This is done only to not
# get misleading log messages
deps_promotion: set[str] = STAGE_1_INTEGRATIONS
while deps_promotion:
old_deps_promotion = deps_promotion
deps_promotion = set()
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
stage_2_domains -= to_be_loaded
for domain in old_deps_promotion:
if domain not in domains_to_setup or domain in stage_1_domains:
continue
stage_1_domains.add(domain)
if (dep_itg := integration_cache.get(domain)) is None:
continue
deps_promotion.update(dep_itg.all_dependencies)
stage_2_domains = domains_to_setup - stage_1_domains
for name, domain_group in pre_stage_domains:
if domain_group:
stage_2_domains -= domain_group
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
if timeout is None:
await _async_setup_multi_components(hass, domain_group, config)
# Enables after dependencies when setting up stage 1 domains
async_set_domains_to_be_loaded(hass, stage_1_domains)
# Start setup
if stage_1_domains:
_LOGGER.info("Setting up stage 1: %s", stage_1_domains)
try:
async with hass.timeout.async_timeout(
STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
):
await _async_setup_multi_components(hass, stage_1_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 1 waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
)
else:
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, domain_group, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)
# Add after dependencies when setting up stage 2 domains
async_set_domains_to_be_loaded(hass, stage_2_domains)
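
For reference, a minimal standalone sketch of the substage pattern introduced above: entries whose timeout is None (the recorder substage and those before it) run unbounded, while the rest are wrapped in a timeout that only logs a warning instead of aborting startup. Plain asyncio.timeout and the substage names below stand in for hass.timeout.async_timeout and the real constants; they are illustrative only.

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

# Illustrative substages: (name, domains, timeout); None means "no timeout".
SUBSTAGES: list[tuple[str, set[str], float | None]] = [
    ("recorder", {"recorder"}, None),  # must not be cancelled mid-migration
    ("debugger", {"debugpy"}, 60),     # bounded substage
]

async def _setup_domains(domains: set[str]) -> None:
    """Stand-in for _async_setup_multi_components."""
    await asyncio.sleep(0.1)

async def setup_stage_0() -> None:
    for name, domains, timeout in SUBSTAGES:
        if timeout is None:
            await _setup_domains(domains)
            continue
        try:
            async with asyncio.timeout(timeout):
                await _setup_domains(domains)
        except TimeoutError:
            _LOGGER.warning("Setup timed out for %s - moving forward", name)

asyncio.run(setup_stage_0())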

View File

@ -7,7 +7,7 @@ from dataclasses import dataclass
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
@ -123,12 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Unload AdGuard Home config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
if not hass.config_entries.async_loaded_entries(DOMAIN):
# This is the last loaded instance of AdGuard, deregister any services
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)

View File

@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.9.1"],
"requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."]
}

View File

@ -90,7 +90,7 @@
},
"alarm_arm_home": {
"name": "Arm home",
"description": "Sets the alarm to: _armed, but someone is home_.",
"description": "Arms the alarm in the home mode.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -100,7 +100,7 @@
},
"alarm_arm_away": {
"name": "Arm away",
"description": "Sets the alarm to: _armed, no one home_.",
"description": "Arms the alarm in the away mode.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -110,7 +110,7 @@
},
"alarm_arm_night": {
"name": "Arm night",
"description": "Sets the alarm to: _armed for the night_.",
"description": "Arms the alarm in the night mode.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -120,7 +120,7 @@
},
"alarm_arm_vacation": {
"name": "Arm vacation",
"description": "Sets the alarm to: _armed for vacation_.",
"description": "Arms the alarm in the vacation mode.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -130,7 +130,7 @@
},
"alarm_trigger": {
"name": "Trigger",
"description": "Trigger the alarm manually.",
"description": "Triggers the alarm manually.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",

View File

@ -1,16 +1,23 @@
"""Conversation support for Anthropic."""
from collections.abc import Callable
from collections.abc import AsyncGenerator, Callable
import json
from typing import Any, Literal, cast
from typing import Any, Literal
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
Message,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
TextBlock,
TextBlockParam,
TextDelta,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
@ -109,7 +116,7 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=json.dumps(tool_call.tool_args),
input=tool_call.tool_args,
)
for tool_call in chat_content.tool_calls or ()
],
@ -124,6 +131,66 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
raise ValueError(f"Unexpected content type: {type(chat_content)}")
async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
A typical stream of responses might look something like the following:
- RawMessageStartEvent with no content
- RawContentBlockStartEvent with an empty TextBlock
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
- ...
- RawContentBlockStopEvent
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
- RawContentBlockDeltaEvent with an InputJSONDelta
- RawContentBlockDeltaEvent with an InputJSONDelta
- ...
- RawContentBlockStopEvent
- RawMessageDeltaEvent with a stop_reason='tool_use'
- RawMessageStopEvent(type='message_stop')
"""
if result is None:
raise TypeError("Expected a stream of messages")
current_tool_call: dict | None = None
async for response in result:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_tool_call = {
"id": response.content_block.id,
"name": response.content_block.name,
"input": "",
}
elif isinstance(response.content_block, TextBlock):
yield {"role": "assistant"}
elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta):
if current_tool_call is None:
raise ValueError("Unexpected delta without a tool call")
current_tool_call["input"] += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
LOGGER.debug("yielding delta: %s", response.delta.text)
yield {"content": response.delta.text}
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_call:
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call["id"],
tool_name=current_tool_call["name"],
tool_args=json.loads(current_tool_call["input"]),
)
]
}
current_tool_call = None
class AnthropicConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
):
@ -206,58 +273,30 @@ class AnthropicConversationEntity(
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
response = await client.messages.create(
stream = await client.messages.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
messages=messages,
tools=tools or NOT_GIVEN,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
stream=True,
)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
LOGGER.debug("Response %s", response)
messages.append(_message_convert(response))
text = "".join(
messages.extend(
[
content.text
for content in response.content
if isinstance(content, TextBlock)
_convert_content(content)
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream)
)
]
)
tool_inputs = [
llm.ToolInput(
id=tool_call.id,
tool_name=tool_call.name,
tool_args=cast(dict[str, Any], tool_call.input),
)
for tool_call in response.content
if isinstance(tool_call, ToolUseBlock)
]
tool_results = [
ToolResultBlockParam(
type="tool_result",
tool_use_id=tool_response.tool_call_id,
content=json.dumps(tool_response.tool_result),
)
async for tool_response in chat_log.async_add_assistant_content(
conversation.AssistantContent(
agent_id=user_input.agent_id,
content=text,
tool_calls=tool_inputs or None,
)
)
]
if tool_results:
messages.append(MessageParam(role="user", content=tool_results))
if not tool_inputs:
if not chat_log.unresponded_tool_results:
break
response_content = chat_log.content[-1]
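
For orientation, the deltas that _transform_stream would yield for the event sequence described in its docstring (a text block followed by a tool-use block), written out as plain dicts. The text and tool arguments are invented for the example; the real code yields llm.ToolInput objects rather than dicts.

expected_deltas = [
    {"role": "assistant"},                 # RawContentBlockStartEvent with a TextBlock
    {"content": "Sure, "},                 # TextDelta
    {"content": "turning on the light."},  # TextDelta
    # ToolUseBlock start, accumulated InputJSONDelta fragments, then emitted
    # at the RawContentBlockStopEvent (shown as a dict instead of llm.ToolInput):
    {"tool_calls": [{"id": "toolu_1", "tool_name": "HassTurnOn",
                     "tool_args": {"name": "light.kitchen"}}]},
]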

View File

@ -19,10 +19,20 @@ class ApSystemsEntity(Entity):
data: ApSystemsData,
) -> None:
"""Initialize the APsystems entity."""
# Handle device version safely
sw_version = None
if data.coordinator.device_version:
version_parts = data.coordinator.device_version.split(" ")
if len(version_parts) > 1:
sw_version = version_parts[1]
else:
sw_version = version_parts[0]
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, data.device_id)},
manufacturer="APsystems",
model="EZ1-M",
serial_number=data.device_id,
sw_version=data.coordinator.device_version.split(" ")[1],
sw_version=sw_version,
)
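
The version handling above, isolated as a small sketch; the example strings are assumptions about what device_version might look like, not values captured from real hardware.

def parse_sw_version(device_version: str | None) -> str | None:
    """Return the firmware part of the reported version, if any."""
    if not device_version:
        return None
    parts = device_version.split(" ")
    return parts[1] if len(parts) > 1 else parts[0]

assert parse_sw_version(None) is None
assert parse_sw_version("1.9.0") == "1.9.0"        # single-token report
assert parse_sw_version("EZ1-M 1.9.0") == "1.9.0"  # hypothetical two-token report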

View File

@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
"iot_class": "local_polling",
"loggers": ["arcam"],
"requirements": ["arcam-fmj==1.5.2"],
"requirements": ["arcam-fmj==1.8.0"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

View File

@ -19,6 +19,7 @@ from .const import (
DOMAIN,
AssistSatelliteEntityFeature,
)
from .entity import AssistSatelliteConfiguration
CONNECTION_TEST_TIMEOUT = 30
@ -91,7 +92,16 @@ def websocket_get_configuration(
)
return
config_dict = asdict(satellite.async_get_configuration())
try:
config_dict = asdict(satellite.async_get_configuration())
except NotImplementedError:
# Stub configuration
config_dict = asdict(
AssistSatelliteConfiguration(
available_wake_words=[], active_wake_words=[], max_active_wake_words=1
)
)
config_dict["pipeline_entity_id"] = satellite.pipeline_entity_id
config_dict["vad_entity_id"] = satellite.vad_sensitivity_entity_id

View File

@ -24,6 +24,8 @@ PLATFORMS = [
Platform.FAN,
Platform.LIGHT,
Platform.SELECT,
Platform.SWITCH,
Platform.TIME,
]

View File

@ -14,5 +14,5 @@
"documentation": "https://www.home-assistant.io/integrations/balboa",
"iot_class": "local_push",
"loggers": ["pybalboa"],
"requirements": ["pybalboa==1.1.2"]
"requirements": ["pybalboa==1.1.3"]
}

View File

@ -78,6 +78,19 @@
"high": "High"
}
}
},
"switch": {
"filter_cycle_2_enabled": {
"name": "Filter cycle 2 enabled"
}
},
"time": {
"filter_cycle_start": {
"name": "Filter cycle {index} start"
},
"filter_cycle_end": {
"name": "Filter cycle {index} end"
}
}
}
}

View File

@ -0,0 +1,48 @@
"""Support for Balboa switches."""
from __future__ import annotations
from typing import Any
from pybalboa import SpaClient
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BalboaConfigEntry
from .entity import BalboaEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: BalboaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the spa's switches."""
spa = entry.runtime_data
async_add_entities([BalboaSwitchEntity(spa)])
class BalboaSwitchEntity(BalboaEntity, SwitchEntity):
"""Representation of a Balboa switch entity."""
def __init__(self, spa: SpaClient) -> None:
"""Initialize a Balboa switch entity."""
super().__init__(spa, "filter_cycle_2_enabled")
self._attr_entity_category = EntityCategory.CONFIG
self._attr_translation_key = "filter_cycle_2_enabled"
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
return self._client.filter_cycle_2_enabled
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self._client.configure_filter_cycle(2, enabled=True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
await self._client.configure_filter_cycle(2, enabled=False)

View File

@ -0,0 +1,56 @@
"""Support for Balboa times."""
from __future__ import annotations
from datetime import time
import itertools
from typing import Any
from pybalboa import SpaClient
from homeassistant.components.time import TimeEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BalboaConfigEntry
from .entity import BalboaEntity
FILTER_CYCLE = "filter_cycle_"
async def async_setup_entry(
hass: HomeAssistant,
entry: BalboaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the spa's times."""
spa = entry.runtime_data
async_add_entities(
BalboaTimeEntity(spa, index, period)
for index, period in itertools.product((1, 2), ("start", "end"))
)
class BalboaTimeEntity(BalboaEntity, TimeEntity):
"""Representation of a Balboa time entity."""
entity_category = EntityCategory.CONFIG
def __init__(self, spa: SpaClient, index: int, period: str) -> None:
"""Initialize a Balboa time entity."""
super().__init__(spa, f"{FILTER_CYCLE}{index}_{period}")
self.index = index
self.period = period
self._attr_translation_key = f"{FILTER_CYCLE}{period}"
self._attr_translation_placeholders = {"index": str(index)}
@property
def native_value(self) -> time | None:
"""Return the value reported by the time."""
return getattr(self._client, f"{FILTER_CYCLE}{self.index}_{self.period}")
async def async_set_value(self, value: time) -> None:
"""Change the time."""
args: dict[str, Any] = {self.period: value}
await self._client.configure_filter_cycle(self.index, **args)
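
Spelled out, the itertools.product call above creates one time entity per combination of filter cycle index and period, giving these four key suffixes:

import itertools

for index, period in itertools.product((1, 2), ("start", "end")):
    print(f"filter_cycle_{index}_{period}")
# filter_cycle_1_start, filter_cycle_1_end, filter_cycle_2_start, filter_cycle_2_end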

View File

@ -4,12 +4,13 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Any
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import BangOlufsenConfigEntry
from .const import DOMAIN
from .const import DEVICE_BUTTONS, DOMAIN
async def async_get_config_entry_diagnostics(
@ -25,8 +26,9 @@ async def async_get_config_entry_diagnostics(
if TYPE_CHECKING:
assert config_entry.unique_id
# Add media_player entity's state
entity_registry = er.async_get(hass)
# Add media_player entity's state
if entity_id := entity_registry.async_get_entity_id(
MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id
):
@ -37,4 +39,16 @@ async def async_get_config_entry_diagnostics(
state_dict.pop("context")
data["media_player"] = state_dict
# Add button Event entity states (if enabled)
for device_button in DEVICE_BUTTONS:
if entity_id := entity_registry.async_get_entity_id(
EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
):
if state := hass.states.get(entity_id):
state_dict = dict(state.as_dict())
# Remove context as it is not relevant
state_dict.pop("context")
data[f"{device_button}_event"] = state_dict
return data

View File

@ -5,14 +5,14 @@
"title": "Manual YAML fix required for Bayesian"
},
"no_prob_given_false": {
"description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yml` for `bayesian/{entity}`. These observations will be ignored until you do.",
"description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yaml` for `bayesian/{entity}`. These observations will be ignored until you do.",
"title": "Manual YAML addition required for Bayesian"
}
},
"services": {
"reload": {
"name": "[%key:common::action::reload%]",
"description": "Reloads bayesian sensors from the YAML-configuration."
"description": "Reloads Bayesian sensors from the YAML-configuration."
}
}
}

View File

@ -7,5 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"quality_scale": "platinum",
"requirements": ["bring-api==1.0.2"]
}

View File

@ -10,9 +10,9 @@ rules:
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: The integration registers no events
@ -26,8 +26,10 @@ rules:
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
docs-configuration-parameters:
status: exempt
comment: Integration has no configuration parameters
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable:
@ -46,13 +48,15 @@ rules:
discovery:
status: exempt
comment: Integration is a service and has no devices.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: Integration is a service and has no devices.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
entity-category: done
entity-device-class: done

View File

@ -17,13 +17,13 @@ class BroadlinkEntity(Entity):
self._device = device
self._coordinator = device.update_manager.coordinator
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Call when the entity is added to hass."""
self.async_on_remove(self._coordinator.async_add_listener(self._recv_data))
if self._coordinator.data:
self._update_state(self._coordinator.data)
async def async_update(self):
async def async_update(self) -> None:
"""Update the state of the entity."""
await self._coordinator.async_request_refresh()
@ -49,7 +49,7 @@ class BroadlinkEntity(Entity):
"""
@property
def available(self):
def available(self) -> bool:
"""Return True if the entity is available."""
return self._device.available

View File

@ -6,6 +6,7 @@ import asyncio
from collections.abc import Awaitable, Callable
from datetime import datetime, timedelta
from enum import Enum
import logging
from typing import cast
from hass_nabucasa import Cloud
@ -19,6 +20,7 @@ from homeassistant.const import (
CONF_NAME,
CONF_REGION,
EVENT_HOMEASSISTANT_STOP,
FORMAT_DATETIME,
Platform,
)
from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback
@ -33,7 +35,7 @@ from homeassistant.helpers.dispatcher import (
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.util.signal_type import SignalType
# Pre-import backup to avoid it being imported
@ -62,11 +64,13 @@ from .const import (
CONF_THINGTALK_SERVER,
CONF_USER_POOL_ID,
DATA_CLOUD,
DATA_CLOUD_LOG_HANDLER,
DATA_PLATFORMS_SETUP,
DOMAIN,
MODE_DEV,
MODE_PROD,
)
from .helpers import FixedSizeQueueLogHandler
from .prefs import CloudPreferences
from .repairs import async_manage_legacy_subscription_issue
from .subscription import async_subscription_info
@ -245,6 +249,8 @@ def async_remote_ui_url(hass: HomeAssistant) -> str:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the Home Assistant cloud."""
log_handler = hass.data[DATA_CLOUD_LOG_HANDLER] = await _setup_log_handler(hass)
# Process configs
if DOMAIN in config:
kwargs = dict(config[DOMAIN])
@ -267,6 +273,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def _shutdown(event: Event) -> None:
"""Shutdown event."""
await cloud.stop()
logging.root.removeHandler(log_handler)
del hass.data[DATA_CLOUD_LOG_HANDLER]
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
@ -405,3 +413,19 @@ def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None:
async_register_admin_service(
hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
)
async def _setup_log_handler(hass: HomeAssistant) -> FixedSizeQueueLogHandler:
fmt = (
"%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
)
handler = FixedSizeQueueLogHandler()
handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
integration = await async_get_integration(hass, DOMAIN)
loggers: set[str] = {integration.pkg_path, *(integration.loggers or [])}
for logger_name in loggers:
logging.getLogger(logger_name).addHandler(handler)
return handler

View File

@ -3,17 +3,20 @@
from __future__ import annotations
import asyncio
import base64
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
import hashlib
import logging
import random
from typing import Any, Literal
from typing import Any
from aiohttp import ClientError
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
from hass_nabucasa.cloud_api import (
FilesHandlerListEntry,
async_files_delete_file,
async_files_list,
)
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.core import HomeAssistant, callback
@ -24,20 +27,11 @@ from .client import CloudClient
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
_LOGGER = logging.getLogger(__name__)
_STORAGE_BACKUP: Literal["backup"] = "backup"
_RETRY_LIMIT = 5
_RETRY_SECONDS_MIN = 60
_RETRY_SECONDS_MAX = 600
async def _b64md5(stream: AsyncIterator[bytes]) -> str:
"""Calculate the MD5 hash of a file."""
file_hash = hashlib.md5()
async for chunk in stream:
file_hash.update(chunk)
return base64.b64encode(file_hash.digest()).decode()
async def async_get_backup_agents(
hass: HomeAssistant,
**kwargs: Any,
@ -86,11 +80,6 @@ class CloudBackupAgent(BackupAgent):
self._cloud = cloud
self._hass = hass
@callback
def _get_backup_filename(self) -> str:
"""Return the backup filename."""
return f"{self._cloud.client.prefs.instance_id}.tar"
async def async_download_backup(
self,
backup_id: str,
@ -101,13 +90,13 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
if not await self.async_get_backup(backup_id):
if not (backup := await self._async_get_backup(backup_id)):
raise BackupAgentError("Backup not found")
try:
content = await self._cloud.files.download(
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
storage_type=StorageType.BACKUP,
filename=backup["Key"],
)
except CloudError as err:
raise BackupAgentError(f"Failed to download backup: {err}") from err
@ -129,16 +118,19 @@ class CloudBackupAgent(BackupAgent):
if not backup.protected:
raise BackupAgentError("Cloud backups must be protected")
base64md5hash = await _b64md5(await open_stream())
filename = self._get_backup_filename()
metadata = backup.as_dict()
size = backup.size
try:
base64md5hash = await calculate_b64md5(open_stream, size)
except FilesError as err:
raise BackupAgentError(err) from err
filename = f"{self._cloud.client.prefs.instance_id}.tar"
metadata = backup.as_dict()
tries = 1
while tries <= _RETRY_LIMIT:
try:
await self._cloud.files.upload(
storage_type=_STORAGE_BACKUP,
storage_type=StorageType.BACKUP,
open_stream=open_stream,
filename=filename,
base64md5hash=base64md5hash,
@ -179,27 +171,34 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
if not await self.async_get_backup(backup_id):
if not (backup := await self._async_get_backup(backup_id)):
return
try:
await async_files_delete_file(
self._cloud,
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
storage_type=StorageType.BACKUP,
filename=backup["Key"],
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to delete backup") from err
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups = await self._async_list_backups()
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
"""List backups."""
try:
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
_LOGGER.debug("Cloud backups: %s", backups)
backups = await async_files_list(
self._cloud, storage_type=StorageType.BACKUP
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to list backups") from err
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
_LOGGER.debug("Cloud backups: %s", backups)
return backups
async def async_get_backup(
self,
@ -207,10 +206,19 @@ class CloudBackupAgent(BackupAgent):
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
backups = await self.async_list_backups()
if not (backup := await self._async_get_backup(backup_id)):
return None
return AgentBackup.from_dict(backup["Metadata"])
async def _async_get_backup(
self,
backup_id: str,
) -> FilesHandlerListEntry | None:
"""Return a backup."""
backups = await self._async_list_backups()
for backup in backups:
if backup.backup_id == backup_id:
if backup["Metadata"]["backup_id"] == backup_id:
return backup
return None

View File

@ -12,12 +12,14 @@ if TYPE_CHECKING:
from hass_nabucasa import Cloud
from .client import CloudClient
from .helpers import FixedSizeQueueLogHandler
DOMAIN = "cloud"
DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN)
DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey(
"cloud_platforms_setup"
)
DATA_CLOUD_LOG_HANDLER: HassKey[FixedSizeQueueLogHandler] = HassKey("cloud_log_handler")
EVENT_CLOUD_EVENT = "cloud_event"
REQUEST_TIMEOUT = 10

View File

@ -0,0 +1,31 @@
"""Helpers for the cloud component."""
from collections import deque
import logging
from homeassistant.core import HomeAssistant
class FixedSizeQueueLogHandler(logging.Handler):
"""Log handler to store messages, with auto rotation."""
MAX_RECORDS = 500
def __init__(self) -> None:
"""Initialize a new LogHandler."""
super().__init__()
self._records: deque[logging.LogRecord] = deque(maxlen=self.MAX_RECORDS)
def emit(self, record: logging.LogRecord) -> None:
"""Store log message."""
self._records.append(record)
async def get_logs(self, hass: HomeAssistant) -> list[str]:
"""Get stored logs."""
def _get_logs() -> list[str]:
# copy the queue since it can mutate while iterating
records = self._records.copy()
return [self.format(record) for record in records]
return await hass.async_add_executor_job(_get_logs)
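
A small usage sketch for the handler above (assuming the FixedSizeQueueLogHandler defined in this file): attach it to a cloud-related logger with a formatter, let records accumulate in the bounded deque, and render them later for the support package, roughly as _setup_log_handler and the support-package view do.

import logging

async def collect_cloud_logs(hass) -> str:  # hass: HomeAssistant, provided by the caller
    handler = FixedSizeQueueLogHandler()
    handler.setFormatter(
        logging.Formatter("%(asctime)s.%(msecs)03d %(levelname)s [%(name)s] %(message)s")
    )
    logging.getLogger("hass_nabucasa").addHandler(handler)
    # ...later, when building the support package:
    return "\n".join(await handler.get_logs(hass))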

View File

@ -43,6 +43,7 @@ from .assist_pipeline import async_create_cloud_pipeline
from .client import CloudClient
from .const import (
DATA_CLOUD,
DATA_CLOUD_LOG_HANDLER,
EVENT_CLOUD_EVENT,
LOGIN_MFA_TIMEOUT,
PREF_ALEXA_REPORT_STATE,
@ -397,8 +398,11 @@ class DownloadSupportPackageView(HomeAssistantView):
url = "/api/cloud/support_package"
name = "api:cloud:support_package"
def _generate_markdown(
self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
async def _generate_markdown(
self,
hass: HomeAssistant,
hass_info: dict[str, Any],
domains_info: dict[str, dict[str, str]],
) -> str:
def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
if len(domain_info) == 0:
@ -424,6 +428,17 @@ class DownloadSupportPackageView(HomeAssistantView):
"</details>\n\n"
)
log_handler = hass.data[DATA_CLOUD_LOG_HANDLER]
logs = "\n".join(await log_handler.get_logs(hass))
markdown += (
"## Full logs\n\n"
"<details><summary>Logs</summary>\n\n"
"```logs\n"
f"{logs}\n"
"```\n\n"
"</details>\n"
)
return markdown
async def get(self, request: web.Request) -> web.Response:
@ -433,7 +448,7 @@ class DownloadSupportPackageView(HomeAssistantView):
domain_health = await get_system_health_info(hass)
hass_info = domain_health.pop("homeassistant", {})
markdown = self._generate_markdown(hass_info, domain_health)
markdown = await self._generate_markdown(hass, hass_info, domain_health)
return web.Response(
body=markdown,

View File

@ -12,7 +12,7 @@
"documentation": "https://www.home-assistant.io/integrations/cloud",
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["hass_nabucasa"],
"requirements": ["hass-nabucasa==0.89.0"],
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==0.92.0"],
"single_config_entry": true
}

View File

@ -53,6 +53,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util import language as language_util
from homeassistant.util.json import JsonObjectType, json_loads_object
from .chat_log import AssistantContent, async_get_chat_log
@ -914,26 +915,20 @@ class DefaultAgent(ConversationEntity):
def _load_intents(self, language: str) -> LanguageIntents | None:
"""Load all intents for language (run inside executor)."""
intents_dict: dict[str, Any] = {}
language_variant: str | None = None
supported_langs = set(get_languages())
# Choose a language variant upfront and commit to it for custom
# sentences, etc.
all_language_variants = {lang.lower(): lang for lang in supported_langs}
lang_matches = language_util.matches(language, supported_langs)
# en-US, en_US, en, ...
for maybe_variant in _get_language_variations(language):
matching_variant = all_language_variants.get(maybe_variant.lower())
if matching_variant:
language_variant = matching_variant
break
if not language_variant:
if not lang_matches:
_LOGGER.warning(
"Unable to find supported language variant for %s", language
)
return None
language_variant = lang_matches[0]
# Load intents for this language variant
lang_variant_intents = get_intents(language_variant, json_load=json_load)
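
A quick illustration of the variant selection above, assuming homeassistant.util.language.matches behaves as in current releases (it returns the supported languages ordered by how well they match the requested one); the results in the comments are expected outcomes, not captured output.

from homeassistant.util import language as language_util

supported = {"en", "de", "pt-br"}
print(language_util.matches("en-US", supported))  # expected: ["en"]
print(language_util.matches("pt-BR", supported))  # expected: ["pt-br"]
print(language_util.matches("xx", supported))     # expected: [] -> the warning path above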

View File

@ -23,14 +23,14 @@
}
},
"error": {
"discovery_error": "Failed to discover a Denon AVR Network Receiver"
"discovery_error": "Failed to discover a Denon AVR network receiver"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"cannot_connect": "Failed to connect, please try again, disconnecting mains power and ethernet cables and reconnecting them may help",
"not_denonavr_manufacturer": "Not a Denon AVR Network Receiver, discovered manufacturer did not match",
"not_denonavr_missing": "Not a Denon AVR Network Receiver, discovery information not complete"
"cannot_connect": "Failed to connect, please try again, disconnecting mains power and Ethernet cables and reconnecting them may help",
"not_denonavr_manufacturer": "Not a Denon AVR network receiver, discovered manufacturer did not match",
"not_denonavr_missing": "Not a Denon AVR network receiver, discovery information not complete"
}
},
"options": {
@ -64,7 +64,7 @@
"fields": {
"dynamic_eq": {
"name": "Dynamic equalizer",
"description": "True/false for enable/disable."
"description": "Whether DynamicEQ should be enabled or disabled."
}
}
},

View File

@ -60,12 +60,12 @@
"description": "Requests gas prices from easyEnergy.",
"fields": {
"config_entry": {
"name": "Config Entry",
"name": "Config entry",
"description": "The configuration entry to use for this action."
},
"incl_vat": {
"name": "VAT Included",
"description": "Include or exclude VAT in the prices, default is true."
"name": "VAT included",
"description": "Whether the prices should include VAT."
},
"start": {
"name": "Start",

View File

@ -23,7 +23,7 @@ from homeassistant.components.climate import (
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from . import EconetConfigEntry
from .const import DOMAIN
@ -35,8 +35,13 @@ ECONET_STATE_TO_HA = {
ThermostatOperationMode.OFF: HVACMode.OFF,
ThermostatOperationMode.AUTO: HVACMode.HEAT_COOL,
ThermostatOperationMode.FAN_ONLY: HVACMode.FAN_ONLY,
ThermostatOperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
}
HA_STATE_TO_ECONET = {
value: key
for key, value in ECONET_STATE_TO_HA.items()
if key != ThermostatOperationMode.EMERGENCY_HEAT
}
HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}
ECONET_FAN_STATE_TO_HA = {
ThermostatFanMode.AUTO: FAN_AUTO,
@ -209,7 +214,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
def turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
async_create_issue(
create_issue(
self.hass,
DOMAIN,
"migrate_aux_heat",
@ -223,7 +228,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
def turn_aux_heat_off(self) -> None:
"""Turn auxiliary heater off."""
async_create_issue(
create_issue(
self.hass,
DOMAIN,
"migrate_aux_heat",

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/econet",
"iot_class": "cloud_push",
"loggers": ["paho_mqtt", "pyeconet"],
"requirements": ["pyeconet==0.1.27"]
"requirements": ["pyeconet==0.1.28"]
}

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
}

View File

@ -250,7 +250,7 @@
"message": "Params are required for the command: {command}"
},
"vacuum_raw_get_positions_not_supported": {
"message": "Getting the positions of the chargers and the device itself is not supported"
"message": "Retrieving the positions of the chargers and the device itself is not supported"
}
},
"selector": {
@ -264,7 +264,7 @@
"services": {
"raw_get_positions": {
"name": "Get raw positions",
"description": "Get the raw response for the positions of the chargers and the device itself."
"description": "Retrieves a raw response containing the positions of the chargers and the device itself."
}
}
}

View File

@ -2,16 +2,18 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
from aiohttp import ClientError
from eheimdigital.device import EheimDigitalDevice
from eheimdigital.hub import EheimDigitalHub
from eheimdigital.types import EheimDeviceType
from eheimdigital.types import EheimDeviceType, EheimDigitalClientError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -43,12 +45,14 @@ class EheimDigitalUpdateCoordinator(
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.main_device_added_event = asyncio.Event()
self.hub = EheimDigitalHub(
host=self.config_entry.data[CONF_HOST],
session=async_get_clientsession(hass),
loop=hass.loop,
receive_callback=self._async_receive_callback,
device_found_callback=self._async_device_found,
main_device_added_event=self.main_device_added_event,
)
self.known_devices: set[str] = set()
self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set()
@ -76,8 +80,17 @@ class EheimDigitalUpdateCoordinator(
self.async_set_updated_data(self.hub.devices)
async def _async_setup(self) -> None:
await self.hub.connect()
await self.hub.update()
try:
await self.hub.connect()
async with asyncio.timeout(2):
# This event gets triggered when the first message is received from
# the device; it contains the data necessary to create the main device.
# This removes the race condition where the main device is accessed
# before the response from the device is parsed.
await self.main_device_added_event.wait()
await self.hub.update()
except (TimeoutError, EheimDigitalClientError) as err:
raise ConfigEntryNotReady from err
async def _async_update_data(self) -> dict[str, EheimDigitalDevice]:
try:

View File

@ -498,7 +498,11 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle device found via zeroconf."""
host = discovery_info.host
host = (
f"[{discovery_info.ip_address}]"
if discovery_info.ip_address.version == 6
else str(discovery_info.ip_address)
)
https_port = (
int(discovery_info.port)
if discovery_info.port is not None

View File

@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.13.4"]
"requirements": ["sense-energy==0.13.5"]
}

View File

@ -16,7 +16,7 @@ class EnOceanEntity(Entity):
"""Initialize the device."""
self.dev_id = dev_id
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(

View File

@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"requirements": ["pyenphase==1.23.1"],
"requirements": ["pyenphase==1.25.1"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator
from .entity import EnvoyBaseEntity
from .entity import EnvoyBaseEntity, exception_handler
PARALLEL_UPDATES = 1
@ -192,6 +192,7 @@ class EnvoyRelaySelectEntity(EnvoyBaseEntity, SelectEntity):
"""Return the state of the Enpower switch."""
return self.entity_description.value_fn(self.relay)
@exception_handler
async def async_select_option(self, option: str) -> None:
"""Update the relay."""
await self.entity_description.update_fn(self.envoy, self.relay, option)
@ -243,6 +244,7 @@ class EnvoyStorageSettingsSelectEntity(EnvoyBaseEntity, SelectEntity):
assert self.data.tariff.storage_settings is not None
return self.entity_description.value_fn(self.data.tariff.storage_settings)
@exception_handler
async def async_select_option(self, option: str) -> None:
"""Update the relay."""
await self.entity_description.update_fn(self.envoy, option)

View File

@ -16,7 +16,7 @@
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
"mqtt": ["esphome/discover/#"],
"requirements": [
"aioesphomeapi==29.0.0",
"aioesphomeapi==29.0.2",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==2.7.1"
],

View File

@ -4,50 +4,43 @@ from __future__ import annotations
from datetime import timedelta
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
from .coordinator import (
FireServiceConfigEntry,
FireServiceRotaClient,
FireServiceUpdateCoordinator,
)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: FireServiceConfigEntry) -> bool:
"""Set up FireServiceRota from a config entry."""
hass.data.setdefault(DOMAIN, {})
client = FireServiceRotaClient(hass, entry)
await client.setup()
if client.token_refresh_failure:
return False
entry.async_on_unload(client.async_stop_listener)
coordinator = FireServiceUpdateCoordinator(hass, client, entry)
await coordinator.async_config_entry_first_refresh()
hass.data[DOMAIN][entry.entry_id] = {
DATA_CLIENT: client,
DATA_COORDINATOR: coordinator,
}
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, entry: FireServiceConfigEntry
) -> bool:
"""Unload FireServiceRota config entry."""
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][DATA_CLIENT].websocket.stop_listener
)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
del hass.data[DOMAIN][entry.entry_id]
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -10,24 +10,22 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
from .coordinator import (
FireServiceConfigEntry,
FireServiceRotaClient,
FireServiceUpdateCoordinator,
)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: FireServiceConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up FireServiceRota binary sensor based on a config entry."""
client: FireServiceRotaClient = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][
DATA_CLIENT
]
coordinator: FireServiceUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
entry.entry_id
][DATA_COORDINATOR]
coordinator = entry.runtime_data
client = coordinator.client
async_add_entities([ResponseBinarySensor(coordinator, client, entry)])

View File

@ -28,12 +28,19 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
type FireServiceConfigEntry = ConfigEntry[FireServiceUpdateCoordinator]
class FireServiceUpdateCoordinator(DataUpdateCoordinator[dict | None]):
"""Data update coordinator for FireServiceRota."""
config_entry: FireServiceConfigEntry
def __init__(
self, hass: HomeAssistant, client: FireServiceRotaClient, entry: ConfigEntry
self,
hass: HomeAssistant,
client: FireServiceRotaClient,
entry: FireServiceConfigEntry,
) -> None:
"""Initialize the FireServiceRota DataUpdateCoordinator."""
super().__init__(
@ -213,3 +220,7 @@ class FireServiceRotaClient:
)
await self.update_call(self.fsr.set_incident_response, self.incident_id, value)
async def async_stop_listener(self) -> None:
"""Stop listener."""
await self._hass.async_add_executor_job(self.websocket.stop_listener)

View File

@ -4,27 +4,24 @@ import logging
from typing import Any
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from .const import DATA_CLIENT, DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import FireServiceRotaClient
from .const import DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import FireServiceConfigEntry, FireServiceRotaClient
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: FireServiceConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up FireServiceRota sensor based on a config entry."""
client = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_CLIENT]
async_add_entities([IncidentsSensor(client)])
async_add_entities([IncidentsSensor(entry.runtime_data.client)])
# pylint: disable-next=hass-invalid-inheritance # needs fixing

View File

@ -9,21 +9,24 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
from .const import DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import (
FireServiceConfigEntry,
FireServiceRotaClient,
FireServiceUpdateCoordinator,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: FireServiceConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up FireServiceRota switch based on a config entry."""
client = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_CLIENT]
coordinator = hass.data[FIRESERVICEROTA_DOMAIN][entry.entry_id][DATA_COORDINATOR]
coordinator = entry.runtime_data
client = coordinator.client
async_add_entities([ResponseSwitch(coordinator, client, entry)])

View File

@ -47,6 +47,10 @@ async def async_setup_entry(
)
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
class FlexitBinarySensor(FlexitEntity, BinarySensorEntity):
"""Representation of a Flexit binary Sensor."""

View File

@ -25,6 +25,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
DOMAIN,
MAX_TEMP,
MIN_TEMP,
PRESET_TO_VENTILATION_MODE_MAP,
@ -43,6 +44,9 @@ async def async_setup_entry(
async_add_entities([FlexitClimateEntity(config_entry.runtime_data)])
PARALLEL_UPDATES = 1
class FlexitClimateEntity(FlexitEntity, ClimateEntity):
"""Flexit air handling unit."""
@ -130,7 +134,13 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
try:
await self.device.set_ventilation_mode(ventilation_mode)
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError from exc
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_preset_mode",
translation_placeholders={
"preset": str(ventilation_mode),
},
) from exc
finally:
await self.coordinator.async_refresh()
@ -150,6 +160,12 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
else:
await self.device.set_ventilation_mode(VENTILATION_MODE_HOME)
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError from exc
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_hvac_mode",
translation_placeholders={
"mode": str(hvac_mode),
},
) from exc
finally:
await self.coordinator.async_refresh()

View File

@ -49,7 +49,11 @@ class FlexitCoordinator(DataUpdateCoordinator[FlexitBACnet]):
await self.device.update()
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise ConfigEntryNotReady(
f"Timeout while connecting to {self.config_entry.data[CONF_IP_ADDRESS]}"
translation_domain=DOMAIN,
translation_key="not_ready",
translation_placeholders={
"ip": str(self.config_entry.data[CONF_IP_ADDRESS]),
},
) from exc
return self.device

View File

@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["flexit_bacnet==2.2.3"]
}

View File

@ -18,6 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import FlexitConfigEntry, FlexitCoordinator
from .entity import FlexitEntity
@ -205,6 +206,9 @@ async def async_setup_entry(
)
PARALLEL_UPDATES = 1
class FlexitNumber(FlexitEntity, NumberEntity):
"""Representation of a Flexit Number."""
@ -246,6 +250,12 @@ class FlexitNumber(FlexitEntity, NumberEntity):
try:
await set_native_value_fn(int(value))
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError from exc
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_value_error",
translation_placeholders={
"value": str(value),
},
) from exc
finally:
await self.coordinator.async_refresh()

View File

@ -0,0 +1,91 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
Integration does not define custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not use any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities don't subscribe to events explicitly
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup:
status: done
comment: |
Done implicitly with `await coordinator.async_config_entry_first_refresh()`.
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
Integration does not use options flow.
docs-installation-parameters: done
entity-unavailable:
status: done
comment: |
Done implicitly with coordinator.
integration-owner: done
log-when-unavailable:
status: done
comment: |
Done implicitly with coordinator.
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
Integration doesn't require any form of authentication.
test-coverage: todo
# Gold
entity-translations: done
entity-device-class: done
devices: done
entity-category: todo
entity-disabled-by-default: todo
discovery: todo
stale-devices:
status: exempt
comment: |
Device type integration.
diagnostics: todo
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
dynamic-devices:
status: exempt
comment: |
Device type integration.
discovery-update-info: todo
repair-issues:
status: exempt
comment: |
This is not applicable for this integration.
docs-use-cases: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-data-update: done
docs-known-limitations: todo
docs-troubleshooting: todo
docs-examples: todo
# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: done

View File

@ -161,6 +161,10 @@ async def async_setup_entry(
)
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
class FlexitSensor(FlexitEntity, SensorEntity):
"""Representation of a Flexit (bacnet) Sensor."""

View File

@ -5,6 +5,10 @@
"data": {
"ip_address": "[%key:common::config_flow::data::ip%]",
"device_id": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"ip_address": "The IP address of the Flexit Nordic device",
"device_id": "The device ID of the Flexit Nordic device"
}
}
},
@ -115,5 +119,22 @@
"name": "Cooker hood mode"
}
}
},
"exceptions": {
"set_value_error": {
"message": "Failed setting the value {value}."
},
"switch_turn": {
"message": "Failed to turn the switch {state}."
},
"set_preset_mode": {
"message": "Failed to set preset mode {preset}."
},
"set_hvac_mode": {
"message": "Failed to set HVAC mode {mode}."
},
"not_ready": {
"message": "Timeout while connecting to {ip}."
}
}
}

View File

@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import FlexitConfigEntry, FlexitCoordinator
from .entity import FlexitEntity
@ -68,6 +69,9 @@ async def async_setup_entry(
)
PARALLEL_UPDATES = 1
class FlexitSwitch(FlexitEntity, SwitchEntity):
"""Representation of a Flexit Switch."""
@ -94,19 +98,31 @@ class FlexitSwitch(FlexitEntity, SwitchEntity):
return self.entity_description.is_on_fn(self.coordinator.data)
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn electric heater on."""
"""Turn switch on."""
try:
await self.entity_description.turn_on_fn(self.coordinator.data)
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError from exc
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="switch_turn",
translation_placeholders={
"state": "on",
},
) from exc
finally:
await self.coordinator.async_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn electric heater off."""
"""Turn switch off."""
try:
await self.entity_description.turn_off_fn(self.coordinator.data)
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError from exc
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="switch_turn",
translation_placeholders={
"state": "off",
},
) from exc
finally:
await self.coordinator.async_refresh()

View File

@ -45,10 +45,10 @@ class FloEntity(Entity):
"""Return True if device is available."""
return self._device.available
async def async_update(self):
async def async_update(self) -> None:
"""Update Flo entity."""
await self._device.async_request_refresh()
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
self.async_on_remove(self._device.async_add_listener(self.async_write_ha_state))

View File

@ -36,11 +36,11 @@
"issues": {
"import_failed_not_allowed_path": {
"title": "The Folder Watcher YAML configuration could not be imported",
"description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in config.yaml and restart Home Assistant to import it and fix this issue."
"description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in configuration.yaml and restart Home Assistant to import it and fix this issue."
},
"setup_not_allowed_path": {
"title": "The Folder Watcher configuration for {path} could not start",
"description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in config.yaml and restart Home Assistant to fix this issue."
"description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in configuration.yaml and restart Home Assistant to fix this issue."
}
},
"entity": {

View File

@ -3,8 +3,13 @@
from __future__ import annotations
import asyncio
from collections.abc import Sequence
import logging
from typing import Any
from pyforked_daapd import ForkedDaapdAPI
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
@ -26,15 +31,15 @@ WEBSOCKET_RECONNECT_TIME = 30 # seconds
class ForkedDaapdUpdater:
"""Manage updates for the forked-daapd device."""
def __init__(self, hass, api, entry_id):
def __init__(self, hass: HomeAssistant, api: ForkedDaapdAPI, entry_id: str) -> None:
"""Initialize."""
self.hass = hass
self._api = api
self.websocket_handler = None
self._all_output_ids = set()
self.websocket_handler: asyncio.Task[None] | None = None
self._all_output_ids: set[str] = set()
self._entry_id = entry_id
async def async_init(self):
async def async_init(self) -> None:
"""Perform async portion of class initialization."""
if not (server_config := await self._api.get_request("config")):
raise PlatformNotReady
@ -51,7 +56,7 @@ class ForkedDaapdUpdater:
else:
_LOGGER.error("Invalid websocket port")
async def _disconnected_callback(self):
async def _disconnected_callback(self) -> None:
"""Send update signals when the websocket gets disconnected."""
async_dispatcher_send(
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), False
@ -60,9 +65,9 @@ class ForkedDaapdUpdater:
self.hass, SIGNAL_UPDATE_OUTPUTS.format(self._entry_id), []
)
async def _update(self, update_types):
async def _update(self, update_types_sequence: Sequence[str]) -> None:
"""Private update method."""
update_types = set(update_types)
update_types = set(update_types_sequence)
update_events = {}
_LOGGER.debug("Updating %s", update_types)
if (
@ -127,8 +132,8 @@ class ForkedDaapdUpdater:
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), True
)
def _add_zones(self, outputs):
outputs_to_add = []
def _add_zones(self, outputs: list[dict[str, Any]]) -> None:
outputs_to_add: list[dict[str, Any]] = []
for output in outputs:
if output["id"] not in self._all_output_ids:
self._all_output_ids.add(output["id"])

View File

@ -85,9 +85,9 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up forked-daapd from a config entry."""
host = config_entry.data[CONF_HOST]
port = config_entry.data[CONF_PORT]
password = config_entry.data[CONF_PASSWORD]
host: str = config_entry.data[CONF_HOST]
port: int = config_entry.data[CONF_PORT]
password: str = config_entry.data[CONF_PASSWORD]
forked_daapd_api = ForkedDaapdAPI(
async_get_clientsession(hass), host, port, password
)
@ -95,8 +95,6 @@ async def async_setup_entry(
clientsession=async_get_clientsession(hass),
api=forked_daapd_api,
ip_address=host,
api_port=port,
api_password=password,
config_entry=config_entry,
)
@ -240,9 +238,7 @@ class ForkedDaapdMaster(MediaPlayerEntity):
_attr_should_poll = False
def __init__(
self, clientsession, api, ip_address, api_port, api_password, config_entry
):
def __init__(self, clientsession, api, ip_address, config_entry):
"""Initialize the ForkedDaapd Master Device."""
# Leave the api public so the browse media helpers can use it
self.api = api
@ -269,7 +265,7 @@ class ForkedDaapdMaster(MediaPlayerEntity):
self._on_remove = None
self._available = False
self._clientsession = clientsession
self._config_entry = config_entry
self._entry_id = config_entry.entry_id
self.update_options(config_entry.options)
self._paused_event = asyncio.Event()
self._pause_requested = False
@ -282,42 +278,42 @@ class ForkedDaapdMaster(MediaPlayerEntity):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_PLAYER.format(self._config_entry.entry_id),
SIGNAL_UPDATE_PLAYER.format(self._entry_id),
self._update_player,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_QUEUE.format(self._config_entry.entry_id),
SIGNAL_UPDATE_QUEUE.format(self._entry_id),
self._update_queue,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_OUTPUTS.format(self._config_entry.entry_id),
SIGNAL_UPDATE_OUTPUTS.format(self._entry_id),
self._update_outputs,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_MASTER.format(self._config_entry.entry_id),
SIGNAL_UPDATE_MASTER.format(self._entry_id),
self._update_callback,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_CONFIG_OPTIONS_UPDATE.format(self._config_entry.entry_id),
SIGNAL_CONFIG_OPTIONS_UPDATE.format(self._entry_id),
self.update_options,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_DATABASE.format(self._config_entry.entry_id),
SIGNAL_UPDATE_DATABASE.format(self._entry_id),
self._update_database,
)
)
@ -411,9 +407,9 @@ class ForkedDaapdMaster(MediaPlayerEntity):
self._track_info = defaultdict(str)
@property
def unique_id(self):
def unique_id(self) -> str:
"""Return unique ID."""
return self._config_entry.entry_id
return self._entry_id
@property
def available(self) -> bool:

View File

@ -35,7 +35,7 @@
"services": {
"ptz": {
"name": "PTZ",
"description": "Pan/Tilt action for Foscam camera.",
"description": "Moves a Foscam camera to a specified direction.",
"fields": {
"movement": {
"name": "Movement",
@ -49,7 +49,7 @@
},
"ptz_preset": {
"name": "PTZ preset",
"description": "PTZ Preset action for Foscam camera.",
"description": "Moves a Foscam camera to a predefined position.",
"fields": {
"preset_name": {
"name": "Preset name",

View File

@ -196,6 +196,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
self.hass = hass
self.host = host
self.mesh_role = MeshRoles.NONE
self.mesh_wifi_uplink = False
self.device_conn_type: str | None = None
self.device_is_router: bool = False
self.password = password
@ -610,6 +611,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
ssid=interf.get("ssid", ""),
type=interf["type"],
)
if interf["type"].lower() == "wlan" and interf[
"name"
].lower().startswith("uplink"):
self.mesh_wifi_uplink = True
if dr.format_mac(int_mac) == self.mac:
self.mesh_role = MeshRoles(node["mesh_role"])
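
The uplink detection added above boils down to a small predicate over the mesh interface data; a framework-free sketch, with an illustrative interface shape:

def has_wifi_uplink(interfaces: list[dict[str, str]]) -> bool:
    """Any WLAN interface whose name starts with 'uplink' counts as a Wi-Fi uplink."""
    return any(
        interf.get("type", "").lower() == "wlan"
        and interf.get("name", "").lower().startswith("uplink")
        for interf in interfaces
    )

print(has_wifi_uplink([{"type": "WLAN", "name": "uplink_5GHz"}]))  # True
print(has_wifi_uplink([{"type": "LAN", "name": "eth0"}]))          # False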

View File

@ -207,8 +207,9 @@ async def async_all_entities_list(
local_ip: str,
) -> list[Entity]:
"""Get a list of all entities."""
if avm_wrapper.mesh_role == MeshRoles.SLAVE:
if not avm_wrapper.mesh_wifi_uplink:
return [*await _async_wifi_entities_list(avm_wrapper, device_friendly_name)]
return []
return [
@ -565,6 +566,9 @@ class FritzBoxWifiSwitch(FritzBoxBaseSwitch):
self._attributes = {}
self._attr_entity_category = EntityCategory.CONFIG
self._attr_entity_registry_enabled_default = (
avm_wrapper.mesh_role is not MeshRoles.SLAVE
)
self._network_num = network_num
switch_info = SwitchInfo(

View File

@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250210.0"]
"requirements": ["home-assistant-frontend==20250214.0"]
}

View File

@ -4,25 +4,27 @@ from __future__ import annotations
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from .const import DOMAIN, PLATFORMS
from .manager import GeoJsonFeedEntityManager
from .const import PLATFORMS
from .manager import GeoJsonConfigEntry, GeoJsonFeedEntityManager
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, config_entry: GeoJsonConfigEntry
) -> bool:
"""Set up the GeoJSON events component as config entry."""
feeds = hass.data.setdefault(DOMAIN, {})
# Create feed entity manager for all platforms.
manager = GeoJsonFeedEntityManager(hass, config_entry)
feeds[config_entry.entry_id] = manager
_LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id)
await remove_orphaned_entities(hass, config_entry.entry_id)
config_entry.runtime_data = manager
config_entry.async_on_unload(manager.async_stop)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
await manager.async_init()
return True
@ -46,10 +48,6 @@ async def remove_orphaned_entities(hass: HomeAssistant, entry_id: str) -> None:
entity_registry.async_remove(entry.entity_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: GeoJsonConfigEntry) -> bool:
"""Unload the GeoJSON events config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
manager: GeoJsonFeedEntityManager = hass.data[DOMAIN].pop(entry.entry_id)
await manager.async_stop()
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
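
A hedged sketch of the typed `runtime_data` pattern these hunks switch to (a Python 3.12 type alias over a generic `ConfigEntry`), assuming a Home Assistant development environment; `MyManager` and `MyConfigEntry` are placeholder names, not part of the commit.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


class MyManager:
    """Placeholder for whatever object the integration needs at runtime."""


type MyConfigEntry = ConfigEntry[MyManager]


async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    entry.runtime_data = MyManager()  # stored on the entry, no hass.data bookkeeping
    return True


async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    _ = entry.runtime_data  # typed access; platforms read the same attribute
    return True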

View File

@ -9,31 +9,24 @@ from typing import Any
from aio_geojson_generic_client.feed_entry import GenericFeedEntry
from homeassistant.components.geo_location import GeolocationEvent
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfLength
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import GeoJsonFeedEntityManager
from .const import (
ATTR_EXTERNAL_ID,
DOMAIN,
SIGNAL_DELETE_ENTITY,
SIGNAL_UPDATE_ENTITY,
SOURCE,
)
from .const import ATTR_EXTERNAL_ID, SIGNAL_DELETE_ENTITY, SIGNAL_UPDATE_ENTITY, SOURCE
from .manager import GeoJsonConfigEntry, GeoJsonFeedEntityManager
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: GeoJsonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the GeoJSON Events platform."""
manager: GeoJsonFeedEntityManager = hass.data[DOMAIN][entry.entry_id]
manager = entry.runtime_data
@callback
def async_add_geolocation(

View File

@ -25,6 +25,8 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
type GeoJsonConfigEntry = ConfigEntry[GeoJsonFeedEntityManager]
class GeoJsonFeedEntityManager:
"""Feed Entity Manager for GeoJSON feeds."""

View File

@ -10,7 +10,7 @@ from google.oauth2.credentials import Credentials
import voluptuous as vol
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
from homeassistant.core import (
HomeAssistant,
@ -99,12 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
hass.data[DOMAIN].pop(entry.entry_id)
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)
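
A standalone sketch of the simplified last-entry check these hunks adopt, assuming a Home Assistant runtime and a placeholder `DOMAIN`; domain services are deregistered only when no loaded entries remain.

from homeassistant.core import HomeAssistant

DOMAIN = "example"  # placeholder domain for the sketch


async def async_cleanup_services(hass: HomeAssistant) -> None:
    """Remove the domain's services once the last loaded entry is gone."""
    if not hass.config_entries.async_loaded_entries(DOMAIN):
        for service_name in hass.services.async_services_for_domain(DOMAIN):
            hass.services.async_remove(DOMAIN, service_name)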

View File

@ -2,7 +2,7 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, discovery
@ -59,12 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -
async def async_unload_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -> bool:
"""Unload a config entry."""
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

View File

@ -12,7 +12,7 @@ from gspread.exceptions import APIError
from gspread.utils import ValueInputOption
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
@ -81,12 +81,7 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleSheetsConfigEntry
) -> bool:
"""Unload a config entry."""
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

View File

@ -11,7 +11,7 @@ from aioguardian import Client
from aioguardian.errors import GuardianError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_DEVICE_ID,
CONF_DEVICE_ID,
@ -247,12 +247,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
if not hass.config_entries.async_loaded_entries(DOMAIN):
# If this is the last loaded instance of Guardian, deregister any services
# defined during integration setup:
for service_name in SERVICES:

View File

@ -43,7 +43,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
translation_key=HabiticaImageEntity.AVATAR,
)
_attr_content_type = "image/png"
_current_appearance: Avatar | None = None
_avatar: Avatar | None = None
_cache: bytes | None = None
def __init__(
@ -55,13 +55,13 @@ class HabiticaImage(HabiticaBase, ImageEntity):
super().__init__(coordinator, self.entity_description)
ImageEntity.__init__(self, hass)
self._attr_image_last_updated = dt_util.utcnow()
self._avatar = extract_avatar(self.coordinator.data.user)
def _handle_coordinator_update(self) -> None:
"""Check if equipped gear and other things have changed since last avatar image generation."""
new_appearance = extract_avatar(self.coordinator.data.user)
if self._current_appearance != new_appearance:
self._current_appearance = new_appearance
if self._avatar != self.coordinator.data.user:
self._avatar = extract_avatar(self.coordinator.data.user)
self._attr_image_last_updated = dt_util.utcnow()
self._cache = None
@ -69,8 +69,6 @@ class HabiticaImage(HabiticaBase, ImageEntity):
async def async_image(self) -> bytes | None:
"""Return cached bytes, otherwise generate new avatar."""
if not self._cache and self._current_appearance:
self._cache = await self.coordinator.generate_avatar(
self._current_appearance
)
if not self._cache and self._avatar:
self._cache = await self.coordinator.generate_avatar(self._avatar)
return self._cache

View File

@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/habitica",
"iot_class": "cloud_polling",
"loggers": ["habiticalib"],
"quality_scale": "platinum",
"requirements": ["habiticalib==0.3.7"]
}

View File

@ -51,7 +51,7 @@ rules:
status: exempt
comment: No supportable devices.
docs-supported-functions: done
docs-troubleshooting: todo
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt

View File

@ -69,3 +69,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_remove_config_entry_device(
hass: HomeAssistant, entry: HeosConfigEntry, device: dr.DeviceEntry
) -> bool:
"""Remove config entry from device if no longer present."""
return not any(
(domain, key)
for domain, key in device.identifiers
if domain == DOMAIN and int(key) in entry.runtime_data.heos.players
)
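
The removal check above reduces to matching device identifiers against the players HEOS still reports; a framework-free sketch with hypothetical values:

def can_remove_device(
    identifiers: set[tuple[str, str]],
    known_player_ids: set[int],
    domain: str = "heos",
) -> bool:
    """True when none of the device identifiers match a currently known player."""
    return not any(
        dom == domain and int(key) in known_player_ids for dom, key in identifiers
    )

print(can_remove_device({("heos", "7")}, {1, 2, 3}))  # True: player 7 is gone
print(can_remove_device({("heos", "2")}, {1, 2, 3}))  # False: player 2 remains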

View File

@ -16,6 +16,7 @@ from pyheos import (
HeosError,
HeosNowPlayingMedia,
HeosOptions,
HeosPlayer,
MediaItem,
MediaType,
PlayerUpdateResult,
@ -58,6 +59,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
credentials=credentials,
)
)
self._platform_callbacks: list[Callable[[Sequence[HeosPlayer]], None]] = []
self._update_sources_pending: bool = False
self._source_list: list[str] = []
self._favorites: dict[int, MediaItem] = {}
@ -124,6 +126,27 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
self.async_update_listeners()
return remove_listener
def async_add_platform_callback(
self, add_entities_callback: Callable[[Sequence[HeosPlayer]], None]
) -> None:
"""Add a callback to add entities for a platform."""
self._platform_callbacks.append(add_entities_callback)
def _async_handle_player_update_result(
self, update_result: PlayerUpdateResult
) -> None:
"""Handle a player update result."""
if update_result.added_player_ids and self._platform_callbacks:
new_players = [
self.heos.players[player_id]
for player_id in update_result.added_player_ids
]
for add_entities_callback in self._platform_callbacks:
add_entities_callback(new_players)
if update_result.updated_player_ids:
self._async_update_player_ids(update_result.updated_player_ids)
async def _async_on_auth_failure(self) -> None:
"""Handle when the user credentials are no longer valid."""
assert self.config_entry is not None
@ -147,8 +170,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
"""Handle a controller event, such as players or groups changed."""
if event == const.EVENT_PLAYERS_CHANGED:
assert data is not None
if data.updated_player_ids:
self._async_update_player_ids(data.updated_player_ids)
self._async_handle_player_update_result(data)
elif (
event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED)
and not self._update_sources_pending
@ -242,9 +264,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
except HeosError as error:
_LOGGER.error("Unable to refresh players: %s", error)
return
# After reconnecting, player_id may have changed
if player_updates.updated_player_ids:
self._async_update_player_ids(player_updates.updated_player_ids)
self._async_handle_player_update_result(player_updates)
@callback
def async_get_source_list(self) -> list[str]:

View File

@ -2,7 +2,7 @@
from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine
from collections.abc import Awaitable, Callable, Coroutine, Sequence
from datetime import datetime
from functools import reduce, wraps
from operator import ior
@ -93,11 +93,16 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add media players for a config entry."""
devices = [
HeosMediaPlayer(entry.runtime_data, player)
for player in entry.runtime_data.heos.players.values()
]
async_add_entities(devices)
def add_entities_callback(players: Sequence[HeosPlayer]) -> None:
"""Add entities for each player."""
async_add_entities(
[HeosMediaPlayer(entry.runtime_data, player) for player in players]
)
coordinator = entry.runtime_data
coordinator.async_add_platform_callback(add_entities_callback)
add_entities_callback(list(coordinator.heos.players.values()))
type _FuncType[**_P] = Callable[_P, Awaitable[Any]]
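
A framework-free sketch of the dynamic-device wiring above: the coordinator keeps platform callbacks and invokes them only for players it has not seen before. The class name and integer IDs are illustrative.

from collections.abc import Callable, Sequence


class PlayerRegistry:
    """Toy stand-in for a coordinator tracking known players."""

    def __init__(self) -> None:
        self._known: set[int] = set()
        self._callbacks: list[Callable[[Sequence[int]], None]] = []

    def add_platform_callback(self, cb: Callable[[Sequence[int]], None]) -> None:
        self._callbacks.append(cb)

    def handle_update(self, player_ids: Sequence[int]) -> None:
        new = [pid for pid in player_ids if pid not in self._known]
        self._known.update(new)
        if new:
            for cb in self._callbacks:
                cb(new)


registry = PlayerRegistry()
registry.add_platform_callback(lambda ids: print("add entities for", list(ids)))
registry.handle_update([1, 2])  # add entities for [1, 2]
registry.handle_update([2, 3])  # add entities for [3]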

View File

@ -49,7 +49,7 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: todo
dynamic-devices: done
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
@ -57,8 +57,8 @@ rules:
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues: todo
stale-devices: todo
repair-issues: done
stale-devices: done
# Platinum
async-dependency: done
inject-websession:

View File

@ -9,5 +9,5 @@
},
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.1"]
"requirements": ["pyhive-integration==1.0.2"]
}

View File

@ -35,7 +35,7 @@ class SW16Entity(Entity):
self.async_write_ha_state()
@property
def available(self):
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._client.is_connected)
@ -44,7 +44,7 @@ class SW16Entity(Entity):
"""Update availability state."""
self.async_write_ha_state()
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Register update callback."""
self._client.register_status_callback(
self.handle_event_callback, self._device_port

View File

@ -2,11 +2,19 @@
from __future__ import annotations
from collections.abc import Awaitable
import logging
from typing import Any, cast
from aiohomeconnect.client import Client as HomeConnectClient
from aiohomeconnect.model import CommandKey, Option, OptionKey, ProgramKey, SettingKey
from aiohomeconnect.model import (
ArrayOfOptions,
CommandKey,
Option,
OptionKey,
ProgramKey,
SettingKey,
)
from aiohomeconnect.model.error import HomeConnectError
import voluptuous as vol
@ -19,34 +27,74 @@ from homeassistant.helpers import (
device_registry as dr,
)
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.typing import ConfigType
from .api import AsyncConfigEntryAuth
from .const import (
AFFECTS_TO_ACTIVE_PROGRAM,
AFFECTS_TO_SELECTED_PROGRAM,
ATTR_AFFECTS_TO,
ATTR_KEY,
ATTR_PROGRAM,
ATTR_UNIT,
ATTR_VALUE,
DOMAIN,
OLD_NEW_UNIQUE_ID_SUFFIX_MAP,
PROGRAM_ENUM_OPTIONS,
SERVICE_OPTION_ACTIVE,
SERVICE_OPTION_SELECTED,
SERVICE_PAUSE_PROGRAM,
SERVICE_RESUME_PROGRAM,
SERVICE_SELECT_PROGRAM,
SERVICE_SET_PROGRAM_AND_OPTIONS,
SERVICE_SETTING,
SERVICE_START_PROGRAM,
SVE_TRANSLATION_PLACEHOLDER_KEY,
SVE_TRANSLATION_PLACEHOLDER_PROGRAM,
SVE_TRANSLATION_PLACEHOLDER_VALUE,
TRANSLATION_KEYS_PROGRAMS_MAP,
)
from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator
from .utils import get_dict_from_home_connect_error
from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PROGRAM_OPTIONS = {
bsh_key_to_translation_key(key): (
key,
value,
)
for key, value in {
OptionKey.BSH_COMMON_DURATION: int,
OptionKey.BSH_COMMON_START_IN_RELATIVE: int,
OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int,
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int,
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool,
OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool,
OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool,
OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool,
OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool,
OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool,
OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool,
OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool,
OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int,
OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool,
OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool,
OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool,
}.items()
}
SERVICE_SETTING_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -58,6 +106,7 @@ SERVICE_SETTING_SCHEMA = vol.Schema(
}
)
# DEPRECATED: Remove in 2025.9.0
SERVICE_OPTION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -70,6 +119,7 @@ SERVICE_OPTION_SCHEMA = vol.Schema(
}
)
# DEPRECATED: Remove in 2025.9.0
SERVICE_PROGRAM_SCHEMA = vol.Any(
{
vol.Required(ATTR_DEVICE_ID): str,
@ -93,6 +143,46 @@ SERVICE_PROGRAM_SCHEMA = vol.Any(
},
)
def _require_program_or_at_least_one_option(data: dict) -> dict:
if ATTR_PROGRAM not in data and not any(
option_key in data for option_key in (PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS)
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="required_program_or_one_option_at_least",
)
return data
SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
vol.Required(ATTR_AFFECTS_TO): vol.In(
[AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM]
),
vol.Optional(ATTR_PROGRAM): vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()),
}
)
.extend(
{
vol.Optional(translation_key): vol.In(allowed_values.keys())
for translation_key, (
key,
allowed_values,
) in PROGRAM_ENUM_OPTIONS.items()
}
)
.extend(
{
vol.Optional(translation_key): schema
for translation_key, (key, schema) in PROGRAM_OPTIONS.items()
}
),
_require_program_or_at_least_one_option,
)
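
A standalone voluptuous sketch of the same idea, using placeholder keys: a base schema plus a post-validator that rejects calls providing neither a program nor any option.

import voluptuous as vol

OPTION_KEYS = {"option_a", "option_b"}  # placeholders for the real option keys


def require_program_or_option(data: dict) -> dict:
    """Fail validation unless a program or at least one option is present."""
    if "program" not in data and not OPTION_KEYS & data.keys():
        raise vol.Invalid("provide a program or at least one option")
    return data


SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Required("device_id"): str,
            vol.Optional("program"): str,
            vol.Optional("option_a"): int,
            vol.Optional("option_b"): bool,
        }
    ),
    require_program_or_option,
)

print(SCHEMA({"device_id": "abc", "option_a": 3}))  # passes validation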
SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str})
PLATFORMS = [
@ -144,7 +234,7 @@ async def _get_client_and_ha_id(
return entry.runtime_data.client, ha_id
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901
"""Set up Home Connect component."""
async def _async_service_program(call: ServiceCall, start: bool):
@ -165,6 +255,57 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
else None
)
async_create_issue(
hass,
DOMAIN,
"deprecated_set_program_and_option_actions",
breaks_in_ha_version="2025.9.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_set_program_and_option_actions",
translation_placeholders={
"new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS,
"remove_release": "2025.9.0",
"deprecated_action_yaml": "\n".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_PROGRAM}: {program}",
*([f" {ATTR_KEY}: {options[0].key}"] if options else []),
*([f" {ATTR_VALUE}: {options[0].value}"] if options else []),
*(
[f" {ATTR_UNIT}: {options[0].unit}"]
if options and options[0].unit
else []
),
"```",
]
),
"new_action_yaml": "\n ".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}",
f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}",
*(
[
f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}"
]
if options
else []
),
"```",
]
),
"repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)",
},
)
try:
if start:
await client.start_program(ha_id, program_key=program, options=options)
@ -189,6 +330,44 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
unit = call.data.get(ATTR_UNIT)
client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID])
async_create_issue(
hass,
DOMAIN,
"deprecated_set_program_and_option_actions",
breaks_in_ha_version="2025.9.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_set_program_and_option_actions",
translation_placeholders={
"new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS,
"remove_release": "2025.9.0",
"deprecated_action_yaml": "\n".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_KEY}: {option_key}",
f" {ATTR_VALUE}: {value}",
*([f" {ATTR_UNIT}: {unit}"] if unit else []),
"```",
]
),
"new_action_yaml": "\n ".join(
[
"```yaml",
f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}",
"data:",
f" {ATTR_DEVICE_ID}: DEVICE_ID",
f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}",
f" {bsh_key_to_translation_key(option_key)}: {value}",
"```",
]
),
"repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)",
},
)
try:
if active:
await client.set_active_program_option(
@ -272,6 +451,76 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Service for selecting a program."""
await _async_service_program(call, False)
async def async_service_set_program_and_options(call: ServiceCall):
"""Service for setting a program and options."""
data = dict(call.data)
program = data.pop(ATTR_PROGRAM, None)
affects_to = data.pop(ATTR_AFFECTS_TO)
client, ha_id = await _get_client_and_ha_id(hass, data.pop(ATTR_DEVICE_ID))
options: list[Option] = []
for option, value in data.items():
if option in PROGRAM_ENUM_OPTIONS:
options.append(
Option(
PROGRAM_ENUM_OPTIONS[option][0],
PROGRAM_ENUM_OPTIONS[option][1][value],
)
)
elif option in PROGRAM_OPTIONS:
option_key = PROGRAM_OPTIONS[option][0]
options.append(Option(option_key, value))
method_call: Awaitable[Any]
exception_translation_key: str
if program:
program = (
program
if isinstance(program, ProgramKey)
else TRANSLATION_KEYS_PROGRAMS_MAP[program]
)
if affects_to == AFFECTS_TO_ACTIVE_PROGRAM:
method_call = client.start_program(
ha_id, program_key=program, options=options
)
exception_translation_key = "start_program"
elif affects_to == AFFECTS_TO_SELECTED_PROGRAM:
method_call = client.set_selected_program(
ha_id, program_key=program, options=options
)
exception_translation_key = "select_program"
else:
array_of_options = ArrayOfOptions(options)
if affects_to == AFFECTS_TO_ACTIVE_PROGRAM:
method_call = client.set_active_program_options(
ha_id, array_of_options=array_of_options
)
exception_translation_key = "set_options_active_program"
else:
# affects_to is AFFECTS_TO_SELECTED_PROGRAM
method_call = client.set_selected_program_options(
ha_id, array_of_options=array_of_options
)
exception_translation_key = "set_options_selected_program"
try:
await method_call
except HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key=exception_translation_key,
translation_placeholders={
**get_dict_from_home_connect_error(err),
**(
{SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program}
if program
else {}
),
},
) from err
async def async_service_start_program(call: ServiceCall):
"""Service for starting a program."""
await _async_service_program(call, True)
@ -315,6 +564,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_service_start_program,
schema=SERVICE_PROGRAM_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_SET_PROGRAM_AND_OPTIONS,
async_service_set_program_and_options,
schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA,
)
return True
@ -349,6 +604,7 @@ async def async_unload_entry(
hass: HomeAssistant, entry: HomeConnectConfigEntry
) -> bool:
"""Unload a config entry."""
async_delete_issue(hass, DOMAIN, "deprecated_set_program_and_option_actions")
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -1,6 +1,10 @@
"""Constants for the Home Connect integration."""
from aiohomeconnect.model import EventKey, SettingKey, StatusKey
from typing import cast
from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey
from .utils import bsh_key_to_translation_key
DOMAIN = "home_connect"
@ -52,15 +56,18 @@ SERVICE_OPTION_SELECTED = "set_option_selected"
SERVICE_PAUSE_PROGRAM = "pause_program"
SERVICE_RESUME_PROGRAM = "resume_program"
SERVICE_SELECT_PROGRAM = "select_program"
SERVICE_SET_PROGRAM_AND_OPTIONS = "set_program_and_options"
SERVICE_SETTING = "change_setting"
SERVICE_START_PROGRAM = "start_program"
ATTR_AFFECTS_TO = "affects_to"
ATTR_KEY = "key"
ATTR_PROGRAM = "program"
ATTR_UNIT = "unit"
ATTR_VALUE = "value"
AFFECTS_TO_ACTIVE_PROGRAM = "active_program"
AFFECTS_TO_SELECTED_PROGRAM = "selected_program"
SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity"
SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name"
@ -70,6 +77,269 @@ SVE_TRANSLATION_PLACEHOLDER_KEY = "key"
SVE_TRANSLATION_PLACEHOLDER_VALUE = "value"
TRANSLATION_KEYS_PROGRAMS_MAP = {
bsh_key_to_translation_key(program.value): cast(ProgramKey, program)
for program in ProgramKey
if program != ProgramKey.UNKNOWN
}
PROGRAMS_TRANSLATION_KEYS_MAP = {
value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items()
}
REFERENCE_MAP_ID_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.TempMap",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map1",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map2",
"ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.Map3",
)
}
CLEANING_MODE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Silent",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Standard",
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Power",
)
}
BEAN_AMOUNT_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryMild",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Mild",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.MildPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Normal",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.NormalPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.Strong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.StrongPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryStrong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.VeryStrongPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.ExtraStrong",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShot",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShotPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.DoubleShotPlusPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.TripleShot",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.TripleShotPlus",
"ConsumerProducts.CoffeeMaker.EnumType.BeanAmount.CoffeeGround",
)
}
COFFEE_TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.88C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.90C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.92C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.94C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.95C",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeTemperature.96C",
)
}
BEAN_CONTAINER_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.BeanContainerSelection.Right",
"ConsumerProducts.CoffeeMaker.EnumType.BeanContainerSelection.Left",
)
}
FLOW_RATE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.Normal",
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.Intense",
"ConsumerProducts.CoffeeMaker.EnumType.FlowRate.IntensePlus",
)
}
COFFEE_MILK_RATIO_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.10Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.20Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.25Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.30Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.40Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.50Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.55Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.60Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.65Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.67Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.70Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.75Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.80Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.85Percent",
"ConsumerProducts.CoffeeMaker.EnumType.CoffeeMilkRatio.90Percent",
)
}
HOT_WATER_TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.WhiteTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.GreenTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.BlackTea",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.50C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.55C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.60C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.65C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.70C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.75C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.80C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.85C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.90C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.95C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.97C",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.122F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.131F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.140F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.149F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.158F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.167F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.176F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.185F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.194F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.203F",
"ConsumerProducts.CoffeeMaker.EnumType.HotWaterTemperature.Max",
)
}
DRYING_TARGET_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Dryer.EnumType.DryingTarget.IronDry",
"LaundryCare.Dryer.EnumType.DryingTarget.GentleDry",
"LaundryCare.Dryer.EnumType.DryingTarget.CupboardDry",
"LaundryCare.Dryer.EnumType.DryingTarget.CupboardDryPlus",
"LaundryCare.Dryer.EnumType.DryingTarget.ExtraDry",
)
}
VENTING_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Hood.EnumType.Stage.FanOff",
"Cooking.Hood.EnumType.Stage.FanStage01",
"Cooking.Hood.EnumType.Stage.FanStage02",
"Cooking.Hood.EnumType.Stage.FanStage03",
"Cooking.Hood.EnumType.Stage.FanStage04",
"Cooking.Hood.EnumType.Stage.FanStage05",
)
}
INTENSIVE_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStageOff",
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStage1",
"Cooking.Hood.EnumType.IntensiveStage.IntensiveStage2",
)
}
WARMING_LEVEL_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"Cooking.Oven.EnumType.WarmingLevel.Low",
"Cooking.Oven.EnumType.WarmingLevel.Medium",
"Cooking.Oven.EnumType.WarmingLevel.High",
)
}
TEMPERATURE_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Washer.EnumType.Temperature.Cold",
"LaundryCare.Washer.EnumType.Temperature.GC20",
"LaundryCare.Washer.EnumType.Temperature.GC30",
"LaundryCare.Washer.EnumType.Temperature.GC40",
"LaundryCare.Washer.EnumType.Temperature.GC50",
"LaundryCare.Washer.EnumType.Temperature.GC60",
"LaundryCare.Washer.EnumType.Temperature.GC70",
"LaundryCare.Washer.EnumType.Temperature.GC80",
"LaundryCare.Washer.EnumType.Temperature.GC90",
"LaundryCare.Washer.EnumType.Temperature.UlCold",
"LaundryCare.Washer.EnumType.Temperature.UlWarm",
"LaundryCare.Washer.EnumType.Temperature.UlHot",
"LaundryCare.Washer.EnumType.Temperature.UlExtraHot",
)
}
SPIN_SPEED_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Washer.EnumType.SpinSpeed.Off",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM400",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM600",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM800",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1000",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1200",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1400",
"LaundryCare.Washer.EnumType.SpinSpeed.RPM1600",
"LaundryCare.Washer.EnumType.SpinSpeed.UlOff",
"LaundryCare.Washer.EnumType.SpinSpeed.UlLow",
"LaundryCare.Washer.EnumType.SpinSpeed.UlMedium",
"LaundryCare.Washer.EnumType.SpinSpeed.UlHigh",
)
}
VARIO_PERFECT_OPTIONS = {
bsh_key_to_translation_key(option): option
for option in (
"LaundryCare.Common.EnumType.VarioPerfect.Off",
"LaundryCare.Common.EnumType.VarioPerfect.EcoPerfect",
"LaundryCare.Common.EnumType.VarioPerfect.SpeedPerfect",
)
}
PROGRAM_ENUM_OPTIONS = {
bsh_key_to_translation_key(option_key): (
option_key,
options,
)
for option_key, options in (
(
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_REFERENCE_MAP_ID,
REFERENCE_MAP_ID_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_CLEANING_MODE,
CLEANING_MODE_OPTIONS,
),
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT, BEAN_AMOUNT_OPTIONS),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_TEMPERATURE,
COFFEE_TEMPERATURE_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_CONTAINER_SELECTION,
BEAN_CONTAINER_OPTIONS,
),
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FLOW_RATE, FLOW_RATE_OPTIONS),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_MILK_RATIO,
COFFEE_MILK_RATIO_OPTIONS,
),
(
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_HOT_WATER_TEMPERATURE,
HOT_WATER_TEMPERATURE_OPTIONS,
),
(OptionKey.LAUNDRY_CARE_DRYER_DRYING_TARGET, DRYING_TARGET_OPTIONS),
(OptionKey.COOKING_COMMON_HOOD_VENTING_LEVEL, VENTING_LEVEL_OPTIONS),
(OptionKey.COOKING_COMMON_HOOD_INTENSIVE_LEVEL, INTENSIVE_LEVEL_OPTIONS),
(OptionKey.COOKING_OVEN_WARMING_LEVEL, WARMING_LEVEL_OPTIONS),
(OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE, TEMPERATURE_OPTIONS),
(OptionKey.LAUNDRY_CARE_WASHER_SPIN_SPEED, SPIN_SPEED_OPTIONS),
(OptionKey.LAUNDRY_CARE_COMMON_VARIO_PERFECT, VARIO_PERFECT_OPTIONS),
)
}
OLD_NEW_UNIQUE_ID_SUFFIX_MAP = {
"ChildLock": SettingKey.BSH_COMMON_CHILD_LOCK,
"Operation State": StatusKey.BSH_COMMON_OPERATION_STATE,

View File

@ -18,6 +18,9 @@
"set_option_selected": {
"service": "mdi:gesture-tap"
},
"set_program_and_options": {
"service": "mdi:form-select"
},
"change_setting": {
"service": "mdi:cog"
}

View File

@ -3,7 +3,7 @@
"name": "Home Connect",
"codeowners": ["@DavidMStraub", "@Diegorro98", "@MartinHjelmare"],
"config_flow": true,
"dependencies": ["application_credentials"],
"dependencies": ["application_credentials", "repairs"],
"documentation": "https://www.home-assistant.io/integrations/home_connect",
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],

View File

@ -15,24 +15,20 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .common import setup_home_connect_entry
from .const import APPLIANCES_WITH_PROGRAMS, DOMAIN, SVE_TRANSLATION_PLACEHOLDER_PROGRAM
from .const import (
APPLIANCES_WITH_PROGRAMS,
DOMAIN,
PROGRAMS_TRANSLATION_KEYS_MAP,
SVE_TRANSLATION_PLACEHOLDER_PROGRAM,
TRANSLATION_KEYS_PROGRAMS_MAP,
)
from .coordinator import (
HomeConnectApplianceData,
HomeConnectConfigEntry,
HomeConnectCoordinator,
)
from .entity import HomeConnectEntity
from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error
TRANSLATION_KEYS_PROGRAMS_MAP = {
bsh_key_to_translation_key(program.value): cast(ProgramKey, program)
for program in ProgramKey
if program != ProgramKey.UNKNOWN
}
PROGRAMS_TRANSLATION_KEYS_MAP = {
value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items()
}
from .utils import get_dict_from_home_connect_error
@dataclass(frozen=True, kw_only=True)

View File

@ -46,6 +46,558 @@ select_program:
example: "seconds"
selector:
text:
set_program_and_options:
fields:
device_id:
required: true
selector:
device:
integration: home_connect
affects_to:
example: active_program
required: true
selector:
select:
translation_key: affects_to
options:
- active_program
- selected_program
program:
example: dishcare_dishwasher_program_auto_2
required: true
selector:
select:
mode: dropdown
custom_value: false
translation_key: programs
options:
- consumer_products_cleaning_robot_program_cleaning_clean_all
- consumer_products_cleaning_robot_program_cleaning_clean_map
- consumer_products_cleaning_robot_program_basic_go_home
- consumer_products_coffee_maker_program_beverage_ristretto
- consumer_products_coffee_maker_program_beverage_espresso
- consumer_products_coffee_maker_program_beverage_espresso_doppio
- consumer_products_coffee_maker_program_beverage_coffee
- consumer_products_coffee_maker_program_beverage_x_l_coffee
- consumer_products_coffee_maker_program_beverage_caffe_grande
- consumer_products_coffee_maker_program_beverage_espresso_macchiato
- consumer_products_coffee_maker_program_beverage_cappuccino
- consumer_products_coffee_maker_program_beverage_latte_macchiato
- consumer_products_coffee_maker_program_beverage_caffe_latte
- consumer_products_coffee_maker_program_beverage_milk_froth
- consumer_products_coffee_maker_program_beverage_warm_milk
- consumer_products_coffee_maker_program_coffee_world_kleiner_brauner
- consumer_products_coffee_maker_program_coffee_world_grosser_brauner
- consumer_products_coffee_maker_program_coffee_world_verlaengerter
- consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun
- consumer_products_coffee_maker_program_coffee_world_wiener_melange
- consumer_products_coffee_maker_program_coffee_world_flat_white
- consumer_products_coffee_maker_program_coffee_world_cortado
- consumer_products_coffee_maker_program_coffee_world_cafe_cortado
- consumer_products_coffee_maker_program_coffee_world_cafe_con_leche
- consumer_products_coffee_maker_program_coffee_world_cafe_au_lait
- consumer_products_coffee_maker_program_coffee_world_doppio
- consumer_products_coffee_maker_program_coffee_world_kaapi
- consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd
- consumer_products_coffee_maker_program_coffee_world_galao
- consumer_products_coffee_maker_program_coffee_world_garoto
- consumer_products_coffee_maker_program_coffee_world_americano
- consumer_products_coffee_maker_program_coffee_world_red_eye
- consumer_products_coffee_maker_program_coffee_world_black_eye
- consumer_products_coffee_maker_program_coffee_world_dead_eye
- consumer_products_coffee_maker_program_beverage_hot_water
- dishcare_dishwasher_program_pre_rinse
- dishcare_dishwasher_program_auto_1
- dishcare_dishwasher_program_auto_2
- dishcare_dishwasher_program_auto_3
- dishcare_dishwasher_program_eco_50
- dishcare_dishwasher_program_quick_45
- dishcare_dishwasher_program_intensiv_70
- dishcare_dishwasher_program_normal_65
- dishcare_dishwasher_program_glas_40
- dishcare_dishwasher_program_glass_care
- dishcare_dishwasher_program_night_wash
- dishcare_dishwasher_program_quick_65
- dishcare_dishwasher_program_normal_45
- dishcare_dishwasher_program_intensiv_45
- dishcare_dishwasher_program_auto_half_load
- dishcare_dishwasher_program_intensiv_power
- dishcare_dishwasher_program_magic_daily
- dishcare_dishwasher_program_super_60
- dishcare_dishwasher_program_kurz_60
- dishcare_dishwasher_program_express_sparkle_65
- dishcare_dishwasher_program_machine_care
- dishcare_dishwasher_program_steam_fresh
- dishcare_dishwasher_program_maximum_cleaning
- dishcare_dishwasher_program_mixed_load
- laundry_care_dryer_program_cotton
- laundry_care_dryer_program_synthetic
- laundry_care_dryer_program_mix
- laundry_care_dryer_program_blankets
- laundry_care_dryer_program_business_shirts
- laundry_care_dryer_program_down_feathers
- laundry_care_dryer_program_hygiene
- laundry_care_dryer_program_jeans
- laundry_care_dryer_program_outdoor
- laundry_care_dryer_program_synthetic_refresh
- laundry_care_dryer_program_towels
- laundry_care_dryer_program_delicates
- laundry_care_dryer_program_super_40
- laundry_care_dryer_program_shirts_15
- laundry_care_dryer_program_pillow
- laundry_care_dryer_program_anti_shrink
- laundry_care_dryer_program_my_time_my_drying_time
- laundry_care_dryer_program_time_cold
- laundry_care_dryer_program_time_warm
- laundry_care_dryer_program_in_basket
- laundry_care_dryer_program_time_cold_fix_time_cold_20
- laundry_care_dryer_program_time_cold_fix_time_cold_30
- laundry_care_dryer_program_time_cold_fix_time_cold_60
- laundry_care_dryer_program_time_warm_fix_time_warm_30
- laundry_care_dryer_program_time_warm_fix_time_warm_40
- laundry_care_dryer_program_time_warm_fix_time_warm_60
- laundry_care_dryer_program_dessous
- cooking_common_program_hood_automatic
- cooking_common_program_hood_venting
- cooking_common_program_hood_delayed_shut_off
- cooking_oven_program_heating_mode_pre_heating
- cooking_oven_program_heating_mode_hot_air
- cooking_oven_program_heating_mode_hot_air_eco
- cooking_oven_program_heating_mode_hot_air_grilling
- cooking_oven_program_heating_mode_top_bottom_heating
- cooking_oven_program_heating_mode_top_bottom_heating_eco
- cooking_oven_program_heating_mode_bottom_heating
- cooking_oven_program_heating_mode_pizza_setting
- cooking_oven_program_heating_mode_slow_cook
- cooking_oven_program_heating_mode_intensive_heat
- cooking_oven_program_heating_mode_keep_warm
- cooking_oven_program_heating_mode_preheat_ovenware
- cooking_oven_program_heating_mode_frozen_heatup_special
- cooking_oven_program_heating_mode_desiccation
- cooking_oven_program_heating_mode_defrost
- cooking_oven_program_heating_mode_proof
- cooking_oven_program_heating_mode_hot_air_30_steam
- cooking_oven_program_heating_mode_hot_air_60_steam
- cooking_oven_program_heating_mode_hot_air_80_steam
- cooking_oven_program_heating_mode_hot_air_100_steam
- cooking_oven_program_heating_mode_sabbath_programme
- cooking_oven_program_microwave_90_watt
- cooking_oven_program_microwave_180_watt
- cooking_oven_program_microwave_360_watt
- cooking_oven_program_microwave_600_watt
- cooking_oven_program_microwave_900_watt
- cooking_oven_program_microwave_1000_watt
- cooking_oven_program_microwave_max
- cooking_oven_program_heating_mode_warming_drawer
- laundry_care_washer_program_cotton
- laundry_care_washer_program_cotton_cotton_eco
- laundry_care_washer_program_cotton_eco_4060
- laundry_care_washer_program_cotton_colour
- laundry_care_washer_program_easy_care
- laundry_care_washer_program_mix
- laundry_care_washer_program_mix_night_wash
- laundry_care_washer_program_delicates_silk
- laundry_care_washer_program_wool
- laundry_care_washer_program_sensitive
- laundry_care_washer_program_auto_30
- laundry_care_washer_program_auto_40
- laundry_care_washer_program_auto_60
- laundry_care_washer_program_chiffon
- laundry_care_washer_program_curtains
- laundry_care_washer_program_dark_wash
- laundry_care_washer_program_dessous
- laundry_care_washer_program_monsoon
- laundry_care_washer_program_outdoor
- laundry_care_washer_program_plush_toy
- laundry_care_washer_program_shirts_blouses
- laundry_care_washer_program_sport_fitness
- laundry_care_washer_program_towels
- laundry_care_washer_program_water_proof
- laundry_care_washer_program_power_speed_59
- laundry_care_washer_program_super_153045_super_15
- laundry_care_washer_program_super_153045_super_1530
- laundry_care_washer_program_down_duvet_duvet
- laundry_care_washer_program_rinse_rinse_spin_drain
- laundry_care_washer_program_drum_clean
- laundry_care_washer_dryer_program_cotton
- laundry_care_washer_dryer_program_cotton_eco_4060
- laundry_care_washer_dryer_program_mix
- laundry_care_washer_dryer_program_easy_care
- laundry_care_washer_dryer_program_wash_and_dry_60
- laundry_care_washer_dryer_program_wash_and_dry_90
cleaning_robot_options:
collapsed: true
fields:
consumer_products_cleaning_robot_option_reference_map_id:
example: consumer_products_cleaning_robot_enum_type_available_maps_map1
required: false
selector:
select:
mode: dropdown
translation_key: available_maps
options:
- consumer_products_cleaning_robot_enum_type_available_maps_temp_map
- consumer_products_cleaning_robot_enum_type_available_maps_map1
- consumer_products_cleaning_robot_enum_type_available_maps_map2
- consumer_products_cleaning_robot_enum_type_available_maps_map3
consumer_products_cleaning_robot_option_cleaning_mode:
example: consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
required: false
selector:
select:
mode: dropdown
translation_key: cleaning_mode
options:
- consumer_products_cleaning_robot_enum_type_cleaning_modes_silent
- consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
- consumer_products_cleaning_robot_enum_type_cleaning_modes_power
coffee_maker_options:
collapsed: true
fields:
consumer_products_coffee_maker_option_bean_amount:
example: consumer_products_coffee_maker_enum_type_bean_amount_normal
required: false
selector:
select:
mode: dropdown
translation_key: bean_amount
options:
- consumer_products_coffee_maker_enum_type_bean_amount_very_mild
- consumer_products_coffee_maker_enum_type_bean_amount_mild
- consumer_products_coffee_maker_enum_type_bean_amount_mild_plus
- consumer_products_coffee_maker_enum_type_bean_amount_normal
- consumer_products_coffee_maker_enum_type_bean_amount_normal_plus
- consumer_products_coffee_maker_enum_type_bean_amount_strong
- consumer_products_coffee_maker_enum_type_bean_amount_strong_plus
- consumer_products_coffee_maker_enum_type_bean_amount_very_strong
- consumer_products_coffee_maker_enum_type_bean_amount_very_strong_plus
- consumer_products_coffee_maker_enum_type_bean_amount_extra_strong
- consumer_products_coffee_maker_enum_type_bean_amount_double_shot
- consumer_products_coffee_maker_enum_type_bean_amount_double_shot_plus
- consumer_products_coffee_maker_enum_type_bean_amount_double_shot_plus_plus
- consumer_products_coffee_maker_enum_type_bean_amount_triple_shot
- consumer_products_coffee_maker_enum_type_bean_amount_triple_shot_plus
- consumer_products_coffee_maker_enum_type_bean_amount_coffee_ground
consumer_products_coffee_maker_option_fill_quantity:
example: 60
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: ml
consumer_products_coffee_maker_option_coffee_temperature:
example: consumer_products_coffee_maker_enum_type_coffee_temperature_88_c
required: false
selector:
select:
mode: dropdown
translation_key: coffee_temperature
options:
- consumer_products_coffee_maker_enum_type_coffee_temperature_88_c
- consumer_products_coffee_maker_enum_type_coffee_temperature_90_c
- consumer_products_coffee_maker_enum_type_coffee_temperature_92_c
- consumer_products_coffee_maker_enum_type_coffee_temperature_94_c
- consumer_products_coffee_maker_enum_type_coffee_temperature_95_c
- consumer_products_coffee_maker_enum_type_coffee_temperature_96_c
consumer_products_coffee_maker_option_bean_container:
example: consumer_products_coffee_maker_enum_type_bean_container_selection_right
required: false
selector:
select:
mode: dropdown
translation_key: bean_container
options:
- consumer_products_coffee_maker_enum_type_bean_container_selection_right
- consumer_products_coffee_maker_enum_type_bean_container_selection_left
consumer_products_coffee_maker_option_flow_rate:
example: consumer_products_coffee_maker_enum_type_flow_rate_normal
required: false
selector:
select:
mode: dropdown
translation_key: flow_rate
options:
- consumer_products_coffee_maker_enum_type_flow_rate_normal
- consumer_products_coffee_maker_enum_type_flow_rate_intense
- consumer_products_coffee_maker_enum_type_flow_rate_intense_plus
consumer_products_coffee_maker_option_multiple_beverages:
example: false
required: false
selector:
boolean:
consumer_products_coffee_maker_option_coffee_milk_ratio:
example: consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent
required: false
selector:
select:
mode: dropdown
translation_key: coffee_milk_ratio
options:
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_10_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_20_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_25_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_30_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_40_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_55_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_60_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_65_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_67_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_70_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_75_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_80_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_85_percent
- consumer_products_coffee_maker_enum_type_coffee_milk_ratio_90_percent
consumer_products_coffee_maker_option_hot_water_temperature:
example: consumer_products_coffee_maker_enum_type_hot_water_temperature_50_c
required: false
selector:
select:
mode: dropdown
translation_key: hot_water_temperature
options:
- consumer_products_coffee_maker_enum_type_hot_water_temperature_white_tea
- consumer_products_coffee_maker_enum_type_hot_water_temperature_green_tea
- consumer_products_coffee_maker_enum_type_hot_water_temperature_black_tea
- consumer_products_coffee_maker_enum_type_hot_water_temperature_50_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_55_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_60_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_65_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_70_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_75_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_80_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_85_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_90_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_95_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_97_c
- consumer_products_coffee_maker_enum_type_hot_water_temperature_122_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_131_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_140_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_149_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_158_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_167_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_176_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_185_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_194_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_203_f
- consumer_products_coffee_maker_enum_type_hot_water_temperature_max
dish_washer_options:
collapsed: true
fields:
b_s_h_common_option_start_in_relative:
example: 3600
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: s
dishcare_dishwasher_option_intensiv_zone:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_brilliance_dry:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_vario_speed_plus:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_silence_on_demand:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_half_load:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_extra_dry:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_hygiene_plus:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_eco_dry:
example: false
required: false
selector:
boolean:
dishcare_dishwasher_option_zeolite_dry:
example: false
required: false
selector:
boolean:
dryer_options:
collapsed: true
fields:
laundry_care_dryer_option_drying_target:
example: laundry_care_dryer_enum_type_drying_target_iron_dry
required: false
selector:
select:
mode: dropdown
translation_key: drying_target
options:
- laundry_care_dryer_enum_type_drying_target_iron_dry
- laundry_care_dryer_enum_type_drying_target_gentle_dry
- laundry_care_dryer_enum_type_drying_target_cupboard_dry
- laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus
- laundry_care_dryer_enum_type_drying_target_extra_dry
hood_options:
collapsed: true
fields:
cooking_hood_option_venting_level:
example: cooking_hood_enum_type_stage_fan_stage01
required: false
selector:
select:
mode: dropdown
translation_key: venting_level
options:
- cooking_hood_enum_type_stage_fan_off
- cooking_hood_enum_type_stage_fan_stage01
- cooking_hood_enum_type_stage_fan_stage02
- cooking_hood_enum_type_stage_fan_stage03
- cooking_hood_enum_type_stage_fan_stage04
- cooking_hood_enum_type_stage_fan_stage05
cooking_hood_option_intensive_level:
example: cooking_hood_enum_type_intensive_stage_intensive_stage1
required: false
selector:
select:
mode: dropdown
translation_key: intensive_level
options:
- cooking_hood_enum_type_intensive_stage_intensive_stage_off
- cooking_hood_enum_type_intensive_stage_intensive_stage1
- cooking_hood_enum_type_intensive_stage_intensive_stage2
oven_options:
collapsed: true
fields:
cooking_oven_option_setpoint_temperature:
example: 180
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: °C/°F
b_s_h_common_option_duration:
example: 900
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: s
cooking_oven_option_fast_pre_heat:
example: false
required: false
selector:
boolean:
warming_drawer_options:
collapsed: true
fields:
cooking_oven_option_warming_level:
example: cooking_oven_enum_type_warming_level_medium
required: false
selector:
select:
mode: dropdown
translation_key: warming_level
options:
- cooking_oven_enum_type_warming_level_low
- cooking_oven_enum_type_warming_level_medium
- cooking_oven_enum_type_warming_level_high
washer_options:
collapsed: true
fields:
laundry_care_washer_option_temperature:
example: laundry_care_washer_enum_type_temperature_g_c40
required: false
selector:
select:
mode: dropdown
translation_key: washer_temperature
options:
- laundry_care_washer_enum_type_temperature_cold
- laundry_care_washer_enum_type_temperature_g_c20
- laundry_care_washer_enum_type_temperature_g_c30
- laundry_care_washer_enum_type_temperature_g_c40
- laundry_care_washer_enum_type_temperature_g_c50
- laundry_care_washer_enum_type_temperature_g_c60
- laundry_care_washer_enum_type_temperature_g_c70
- laundry_care_washer_enum_type_temperature_g_c80
- laundry_care_washer_enum_type_temperature_g_c90
- laundry_care_washer_enum_type_temperature_ul_cold
- laundry_care_washer_enum_type_temperature_ul_warm
- laundry_care_washer_enum_type_temperature_ul_hot
- laundry_care_washer_enum_type_temperature_ul_extra_hot
laundry_care_washer_option_spin_speed:
example: laundry_care_washer_enum_type_spin_speed_r_p_m800
required: false
selector:
select:
mode: dropdown
translation_key: spin_speed
options:
- laundry_care_washer_enum_type_spin_speed_off
- laundry_care_washer_enum_type_spin_speed_r_p_m400
- laundry_care_washer_enum_type_spin_speed_r_p_m600
- laundry_care_washer_enum_type_spin_speed_r_p_m800
- laundry_care_washer_enum_type_spin_speed_r_p_m1000
- laundry_care_washer_enum_type_spin_speed_r_p_m1200
- laundry_care_washer_enum_type_spin_speed_r_p_m1400
- laundry_care_washer_enum_type_spin_speed_r_p_m1600
- laundry_care_washer_enum_type_spin_speed_ul_off
- laundry_care_washer_enum_type_spin_speed_ul_low
- laundry_care_washer_enum_type_spin_speed_ul_medium
- laundry_care_washer_enum_type_spin_speed_ul_high
b_s_h_common_option_finish_in_relative:
example: 3600
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: s
laundry_care_washer_option_i_dos1_active:
example: false
required: false
selector:
boolean:
laundry_care_washer_option_i_dos2_active:
example: false
required: false
selector:
boolean:
laundry_care_washer_option_vario_perfect:
example: laundry_care_common_enum_type_vario_perfect_eco_perfect
required: false
selector:
select:
mode: dropdown
translation_key: vario_perfect
options:
- laundry_care_common_enum_type_vario_perfect_off
- laundry_care_common_enum_type_vario_perfect_eco_perfect
- laundry_care_common_enum_type_vario_perfect_speed_perfect
pause_program:
fields:
device_id:
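These option fields mirror the Home Connect option keys one-to-one. Purely as an illustration, the sketch below passes two of the washer fields when calling the action from Python; the home_connect.set_program_and_options action name and the device id are assumptions for this example, not something the diff above establishes.

# Illustrative sketch only: the option keys come from the services.yaml above,
# while the action name and device id are assumed placeholders.
async def start_cotton_wash(hass):
    await hass.services.async_call(
        "home_connect",
        "set_program_and_options",  # assumed action these option sections belong to
        {
            "device_id": "abc123def456",  # placeholder
            "laundry_care_washer_option_temperature": "laundry_care_washer_enum_type_temperature_g_c40",
            "laundry_care_washer_option_spin_speed": "laundry_care_washer_enum_type_spin_speed_r_p_m1200",
        },
        blocking=True,
    )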
File diff suppressed because it is too large

View File
@ -12,7 +12,7 @@ import logging
from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType
from universal_silabs_flasher.flasher import Flasher
from homeassistant.components.hassio import AddonError, AddonState
from homeassistant.components.hassio import AddonError, AddonManager, AddonState
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.hassio import is_hassio
@ -143,6 +143,31 @@ class FirmwareInfo:
return all(states)
async def get_otbr_addon_firmware_info(
hass: HomeAssistant, otbr_addon_manager: AddonManager
) -> FirmwareInfo | None:
"""Get firmware info from the OTBR add-on."""
try:
otbr_addon_info = await otbr_addon_manager.async_get_addon_info()
except AddonError:
return None
if otbr_addon_info.state == AddonState.NOT_INSTALLED:
return None
if (otbr_path := otbr_addon_info.options.get("device")) is None:
return None
# Only create a new entry if there are no existing OTBR ones
return FirmwareInfo(
device=otbr_path,
firmware_type=ApplicationType.SPINEL,
firmware_version=None,
source="otbr",
owners=[OwningAddon(slug=otbr_addon_manager.addon_slug)],
)
async def guess_hardware_owners(
hass: HomeAssistant, device_path: str
) -> list[FirmwareInfo]:
@ -155,28 +180,19 @@ async def guess_hardware_owners(
# It may be possible for the OTBR addon to be present without the integration
if is_hassio(hass):
otbr_addon_manager = get_otbr_addon_manager(hass)
otbr_addon_fw_info = await get_otbr_addon_firmware_info(
hass, otbr_addon_manager
)
otbr_path = (
otbr_addon_fw_info.device if otbr_addon_fw_info is not None else None
)
try:
otbr_addon_info = await otbr_addon_manager.async_get_addon_info()
except AddonError:
pass
else:
if otbr_addon_info.state != AddonState.NOT_INSTALLED:
otbr_path = otbr_addon_info.options.get("device")
# Only create a new entry if there are no existing OTBR ones
if otbr_path is not None and not any(
info.source == "otbr" for info in device_guesses[otbr_path]
):
device_guesses[otbr_path].append(
FirmwareInfo(
device=otbr_path,
firmware_type=ApplicationType.SPINEL,
firmware_version=None,
source="otbr",
owners=[OwningAddon(slug=otbr_addon_manager.addon_slug)],
)
)
# Only create a new entry if there are no existing OTBR ones
if otbr_path is not None and not any(
info.source == "otbr" for info in device_guesses[otbr_path]
):
assert otbr_addon_fw_info is not None
device_guesses[otbr_path].append(otbr_addon_fw_info)
if is_hassio(hass):
multipan_addon_manager = await get_multiprotocol_addon_manager(hass)
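For context, the helper extracted above returns None on every early-exit path (the add-on info query failing, the add-on not being installed, or no device option being set), so callers only branch once instead of nesting try/except. A minimal, hypothetical caller inside the same util module might look like the sketch below; the function name is made up, while is_hassio, get_otbr_addon_manager and the new helper are names already used in this file.

# Hypothetical convenience wrapper, relying only on names already present in
# this module (is_hassio, get_otbr_addon_manager, get_otbr_addon_firmware_info).
async def async_get_otbr_device_path(hass: HomeAssistant) -> str | None:
    """Return the serial port the OTBR add-on is configured for, if any."""
    if not is_hassio(hass):
        return None
    fw_info = await get_otbr_addon_firmware_info(hass, get_otbr_addon_manager(hass))
    return fw_info.device if fw_info is not None else None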
View File
@ -62,7 +62,7 @@ class HMDevice(Entity):
if self._state:
self._state = self._state.upper()
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Load data init callbacks."""
self._subscribe_homematic_events()
@ -77,7 +77,7 @@ class HMDevice(Entity):
return self._name
@property
def available(self):
def available(self) -> bool:
"""Return true if device is available."""
return self._available
View File
@ -23,8 +23,10 @@ import voluptuous as vol
from homeassistant.components import onboarding
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_IP_ADDRESS, CONF_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import instance_id
from homeassistant.helpers.selector import TextSelector
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
@ -88,7 +90,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
# Tell device we want a token, user must now press the button within 30 seconds
# The first attempt will always fail, but this opens the window to press the button
token = await async_request_token(self.ip_address)
token = await async_request_token(self.hass, self.ip_address)
errors: dict[str, str] | None = None
if token is None:
@ -250,7 +252,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] | None = None
token = await async_request_token(self.ip_address)
token = await async_request_token(self.hass, self.ip_address)
if user_input is not None:
if token is None:
@ -353,7 +355,7 @@ async def async_try_connect(ip_address: str, token: str | None = None) -> Device
await energy_api.close()
async def async_request_token(ip_address: str) -> str | None:
async def async_request_token(hass: HomeAssistant, ip_address: str) -> str | None:
"""Try to request a token from the device.
This method is used to request a token from the device,
@ -362,8 +364,12 @@ async def async_request_token(ip_address: str) -> str | None:
api = HomeWizardEnergyV2(ip_address)
# Get a part of the unique id to make the token unique
# This is to prevent token conflicts when multiple HA instances are used
uuid = await instance_id.async_get(hass)
try:
return await api.get_token("home-assistant")
return await api.get_token(f"home-assistant#{uuid[:6]}")
except DisabledError:
return None
finally:
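The functional change in this hunk is the token name sent to the device: it now carries the first six characters of this installation's instance id, so two Home Assistant instances pairing with the same HomeWizard device no longer request identically named tokens. A standalone illustration of the derivation; the instance id value is a made-up example of the hex string instance_id.async_get() returns.

# Standalone illustration of the new token-name scheme; the uuid value below is
# a made-up example.
uuid = "1a2b3c4d5e6f7a8b9c0d"
token_name = f"home-assistant#{uuid[:6]}"
assert token_name == "home-assistant#1a2b3c"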
View File
@ -47,7 +47,7 @@ class MigrateToV2ApiRepairFlow(RepairsFlow):
# Tell device we want a token, user must now press the button within 30 seconds
# The first attempt will always fail, but this opens the window to press the button
token = await async_request_token(ip_address)
token = await async_request_token(self.hass, ip_address)
errors: dict[str, str] | None = None
if token is None:
View File
@ -54,7 +54,7 @@ class IHCEntity(Entity):
self.ihc_note = ""
self.ihc_position = ""
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Add callback for IHC changes."""
_LOGGER.debug("Adding IHC entity notify event: %s", self.ihc_id)
self.ihc_controller.add_notify_event(self.ihc_id, self.on_ihc_change, True)
View File
@ -109,7 +109,7 @@ class InsteonEntity(Entity):
)
self.async_write_ha_state()
async def async_added_to_hass(self):
async def async_added_to_hass(self) -> None:
"""Register INSTEON update events."""
_LOGGER.debug(
"Tracking updates for device %s group %d name %s",
@ -137,7 +137,7 @@ class InsteonEntity(Entity):
)
)
async def async_will_remove_from_hass(self):
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe to INSTEON update events."""
_LOGGER.debug(
"Remove tracking updates for device %s group %d name %s",
View File
@ -106,7 +106,7 @@ class ISYNodeEntity(ISYEntity):
return getattr(self._node, TAG_ENABLED, True)
@property
def extra_state_attributes(self) -> dict:
def extra_state_attributes(self) -> dict[str, Any]:
"""Get the state attributes for the device.
The 'aux_properties' in the pyisy Node class are combined with the
@ -189,7 +189,7 @@ class ISYProgramEntity(ISYEntity):
self._actions = actions
@property
def extra_state_attributes(self) -> dict:
def extra_state_attributes(self) -> dict[str, Any]:
"""Get the state attributes for the device."""
attr = {}
if self._actions:
View File
@ -58,7 +58,7 @@
"services": {
"send_raw_node_command": {
"name": "Send raw node command",
"description": "[%key:component::isy994::options::step::init::description%]",
"description": "Sends a “raw” (e.g., DON, DOF) ISY REST device command to a node using its Home Assistant entity ID. This is useful for devices that arent fully supported in Home Assistant yet, such as controls for many NodeServer nodes.",
"fields": {
"command": {
"name": "Command",
View File
@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import SCAN_INTERVAL
from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
@ -75,16 +75,28 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
try:
# Fetch last hour of data
for sensor in self.devices:
sensor.data = (
await self.api.get_sensor_status(
sensor=sensor,
tz=self.hass.config.time_zone,
data = await self.api.get_sensor_status(
sensor=sensor,
tz=self.hass.config.time_zone,
)
_LOGGER.debug("Got data: %s", data)
if data_error := data.get("error"):
if data_error == "no_readings":
sensor.data = None
_LOGGER.debug("No readings for %s", sensor.name)
continue
_LOGGER.debug("Error: %s", data_error)
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="update_error"
)
)["data"]["current"]
_LOGGER.debug("Got data: %s", sensor.data)
sensor.data = data["data"]["current"]
except HTTPError as error:
raise UpdateFailed from error
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="update_error"
) from error
# Verify that we have permission to read the sensors
for sensor in self.devices:
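The reworked update loop distinguishes three response shapes from get_sensor_status: a no_readings error (the sensor has nothing to report, so its data is cleared and the loop continues), any other error payload (surfaced as a translated UpdateFailed), and a normal payload whose reading sits under data.current. A rough sketch of those shapes, inferred from the branching above rather than from API documentation:

# Shapes inferred from the diff above, not from API documentation; the second
# error code and the inner reading fields are placeholders.
no_readings = {"error": "no_readings"}     # -> sensor.data = None, continue
other_error = {"error": "something_else"}  # -> raise UpdateFailed (translated)
ok_payload = {"data": {"current": {"Temperature": 21.4}}}
sensor_data = ok_payload["data"]["current"]  # what ends up on sensor.data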
View File
@ -64,6 +64,7 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
value_fn=get_value,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
),
"Humidity": LaCrosseSensorEntityDescription(
key="Humidity",
@ -71,6 +72,7 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
value_fn=get_value,
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
),
"HeatIndex": LaCrosseSensorEntityDescription(
key="HeatIndex",
@ -79,6 +81,7 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
value_fn=get_value,
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
suggested_display_precision=2,
),
"WindSpeed": LaCrosseSensorEntityDescription(
key="WindSpeed",
@ -86,6 +89,7 @@ SENSOR_DESCRIPTIONS = {
value_fn=get_value,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
device_class=SensorDeviceClass.WIND_SPEED,
suggested_display_precision=2,
),
"Rain": LaCrosseSensorEntityDescription(
key="Rain",
@ -93,12 +97,14 @@ SENSOR_DESCRIPTIONS = {
value_fn=get_value,
native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
device_class=SensorDeviceClass.PRECIPITATION,
suggested_display_precision=2,
),
"WindHeading": LaCrosseSensorEntityDescription(
key="WindHeading",
translation_key="wind_heading",
value_fn=get_value,
native_unit_of_measurement=DEGREE,
suggested_display_precision=2,
),
"WetDry": LaCrosseSensorEntityDescription(
key="WetDry",
@ -117,6 +123,7 @@ SENSOR_DESCRIPTIONS = {
value_fn=get_value,
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
native_unit_of_measurement=UnitOfPressure.HPA,
suggested_display_precision=2,
),
"FeelsLike": LaCrosseSensorEntityDescription(
key="FeelsLike",
@ -125,6 +132,7 @@ SENSOR_DESCRIPTIONS = {
value_fn=get_value,
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
suggested_display_precision=2,
),
"WindChill": LaCrosseSensorEntityDescription(
key="WindChill",
@ -133,6 +141,7 @@ SENSOR_DESCRIPTIONS = {
value_fn=get_value,
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
suggested_display_precision=2,
),
}
# map of API returned unit of measurement strings to their corresponding unit of measurement
View File
@ -42,5 +42,10 @@
"name": "Wind chill"
}
}
},
"exceptions": {
"update_error": {
"message": "Error updating data"
}
}
}
View File
@ -22,7 +22,12 @@ from .const import (
)
from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
PLATFORMS: list[Platform] = [Platform.SWITCH, Platform.TIME]
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
]
async def async_setup_entry(hass: HomeAssistant, entry: LetPotConfigEntry) -> bool:
View File
@ -0,0 +1,122 @@
"""Support for LetPot binary sensor entities."""
from collections.abc import Callable
from dataclasses import dataclass
from letpot.models import DeviceFeature, LetPotDeviceStatus
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
from .entity import LetPotEntity, LetPotEntityDescription
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class LetPotBinarySensorEntityDescription(
LetPotEntityDescription, BinarySensorEntityDescription
):
"""Describes a LetPot binary sensor entity."""
is_on_fn: Callable[[LetPotDeviceStatus], bool]
BINARY_SENSORS: tuple[LetPotBinarySensorEntityDescription, ...] = (
LetPotBinarySensorEntityDescription(
key="low_nutrients",
translation_key="low_nutrients",
is_on_fn=lambda status: bool(status.errors.low_nutrients),
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
device_class=BinarySensorDeviceClass.PROBLEM,
supported_fn=(
lambda coordinator: coordinator.data.errors.low_nutrients is not None
),
),
LetPotBinarySensorEntityDescription(
key="low_water",
translation_key="low_water",
is_on_fn=lambda status: bool(status.errors.low_water),
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
device_class=BinarySensorDeviceClass.PROBLEM,
supported_fn=lambda coordinator: coordinator.data.errors.low_water is not None,
),
LetPotBinarySensorEntityDescription(
key="pump",
translation_key="pump",
is_on_fn=lambda status: status.pump_status == 1,
device_class=BinarySensorDeviceClass.RUNNING,
supported_fn=(
lambda coordinator: DeviceFeature.PUMP_STATUS
in coordinator.device_client.device_features
),
),
LetPotBinarySensorEntityDescription(
key="pump_error",
translation_key="pump_error",
is_on_fn=lambda status: bool(status.errors.pump_malfunction),
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
device_class=BinarySensorDeviceClass.PROBLEM,
supported_fn=(
lambda coordinator: coordinator.data.errors.pump_malfunction is not None
),
),
LetPotBinarySensorEntityDescription(
key="refill_error",
translation_key="refill_error",
is_on_fn=lambda status: bool(status.errors.refill_error),
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
device_class=BinarySensorDeviceClass.PROBLEM,
supported_fn=(
lambda coordinator: coordinator.data.errors.refill_error is not None
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: LetPotConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up LetPot binary sensor entities based on a config entry and device status/features."""
coordinators = entry.runtime_data
async_add_entities(
LetPotBinarySensorEntity(coordinator, description)
for description in BINARY_SENSORS
for coordinator in coordinators
if description.supported_fn(coordinator)
)
class LetPotBinarySensorEntity(LetPotEntity, BinarySensorEntity):
"""Defines a LetPot binary sensor entity."""
entity_description: LetPotBinarySensorEntityDescription
def __init__(
self,
coordinator: LetPotDeviceCoordinator,
description: LetPotBinarySensorEntityDescription,
) -> None:
"""Initialize LetPot binary sensor entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{coordinator.device.serial_number}_{description.key}"
@property
def is_on(self) -> bool:
"""Return if the binary sensor is on."""
return self.entity_description.is_on_fn(self.coordinator.data)
View File
@ -1,18 +1,27 @@
"""Base class for LetPot entities."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any, Concatenate
from letpot.exceptions import LetPotConnectionException, LetPotException
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import LetPotDeviceCoordinator
@dataclass(frozen=True, kw_only=True)
class LetPotEntityDescription(EntityDescription):
"""Description for all LetPot entities."""
supported_fn: Callable[[LetPotDeviceCoordinator], bool] = lambda _: True
class LetPotEntity(CoordinatorEntity[LetPotDeviceCoordinator]):
"""Defines a base LetPot entity."""
View File
@ -1,5 +1,30 @@
{
"entity": {
"binary_sensor": {
"low_nutrients": {
"default": "mdi:beaker-alert",
"state": {
"off": "mdi:beaker"
}
},
"low_water": {
"default": "mdi:water-percent-alert",
"state": {
"off": "mdi:water-percent"
}
},
"pump": {
"default": "mdi:pump",
"state": {
"off": "mdi:pump-off"
}
}
},
"sensor": {
"water_level": {
"default": "mdi:water-percent"
}
},
"switch": {
"alarm_sound": {
"default": "mdi:bell-ring",
Some files were not shown because too many files have changed in this diff