Compare commits

..

1 Commits

Author SHA1 Message Date
Ludovic BOUÉ
bada82fc6e Bump python-roborock to 5.5.1 2026-04-06 16:25:53 +02:00
495 changed files with 4350 additions and 18480 deletions

View File

@@ -47,6 +47,10 @@ jobs:
with:
python-version-file: ".python-version"
- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]
- name: Get version
id: version
uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]

View File

@@ -28,11 +28,11 @@ jobs:
persist-credentials: false
- name: Initialize CodeQL
uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
with:
category: "/language:python"

6
CODEOWNERS generated
View File

@@ -1263,8 +1263,8 @@ CLAUDE.md @home-assistant/core
/tests/components/openuv/ @bachya
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/homeassistant/components/opnsense/ @HarlemSquirrel @Snuffy2
/tests/components/opnsense/ @HarlemSquirrel @Snuffy2
/homeassistant/components/opnsense/ @mtreinish
/tests/components/opnsense/ @mtreinish
/homeassistant/components/opower/ @tronikos
/tests/components/opower/ @tronikos
/homeassistant/components/oralb/ @bdraco @Lash-L
@@ -1875,8 +1875,6 @@ CLAUDE.md @home-assistant/core
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_gx/ @tomer-w
/tests/components/victron_gx/ @tomer-w
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW

View File

@@ -1,5 +1,5 @@
{
"domain": "victron",
"name": "Victron",
"integrations": ["victron_gx", "victron_ble", "victron_remote_monitoring"]
"integrations": ["victron_ble", "victron_remote_monitoring"]
}

View File

@@ -1,7 +1,11 @@
"""The Actron Air integration."""
from actron_neo_api import ActronAirAPI, ActronAirAPIError, ActronAirAuthError
from actron_neo_api.models.system import ActronAirSystemInfo
from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAPIError,
ActronAirAuthError,
)
from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
@@ -21,7 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
"""Set up Actron Air integration from a config entry."""
api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
systems: list[ActronAirSystemInfo] = []
systems: list[ActronAirACSystem] = []
try:
systems = await api.get_ac_systems()
@@ -40,9 +44,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
for system in systems:
coordinator = ActronAirSystemCoordinator(hass, entry, api, system)
_LOGGER.debug("Setting up coordinator for system: %s", system.serial)
_LOGGER.debug("Setting up coordinator for system: %s", system["serial"])
await coordinator.async_config_entry_first_refresh()
system_coordinators[system.serial] = coordinator
system_coordinators[system["serial"]] = coordinator
entry.runtime_data = ActronAirRuntimeData(
api=api,

View File

@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity, actron_air_command
from .entity import ActronAirAcEntity, ActronAirZoneEntity, handle_actron_api_errors
PARALLEL_UPDATES = 0
@@ -136,19 +136,19 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._status.user_aircon_settings.temperature_setpoint_cool_c
@actron_air_command
@handle_actron_api_errors
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set a new fan mode."""
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)
@actron_air_command
@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
await self._status.ac_system.set_system_mode(ac_mode)
@actron_air_command
@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
@@ -212,13 +212,13 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._zone.temperature_setpoint_cool_c
@actron_air_command
@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
is_enabled = hvac_mode != HVACMode.OFF
await self._zone.enable(is_enabled)
@actron_air_command
@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))

View File

@@ -38,10 +38,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error("OAuth2 flow failed: %s", err)
return self.async_abort(reason="oauth2_error")
self._device_code = device_code_response.device_code
self._user_code = device_code_response.user_code
self._verification_uri = device_code_response.verification_uri_complete
self._expires_minutes = str(device_code_response.expires_in // 60)
self._device_code = device_code_response["device_code"]
self._user_code = device_code_response["user_code"]
self._verification_uri = device_code_response["verification_uri_complete"]
self._expires_minutes = str(device_code_response["expires_in"] // 60)
async def _wait_for_authorization() -> None:
"""Wait for the user to authorize the device."""

View File

@@ -6,12 +6,12 @@ from dataclasses import dataclass
from datetime import timedelta
from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAPIError,
ActronAirAuthError,
ActronAirStatus,
)
from actron_neo_api.models.system import ActronAirSystemInfo
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -38,7 +38,7 @@ class ActronAirRuntimeData:
type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirStatus]):
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
"""System coordinator for Actron Air integration."""
def __init__(
@@ -46,7 +46,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirStatus]):
hass: HomeAssistant,
entry: ActronAirConfigEntry,
api: ActronAirAPI,
system: ActronAirSystemInfo,
system: ActronAirACSystem,
) -> None:
"""Initialize the coordinator."""
super().__init__(
@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirStatus]):
config_entry=entry,
)
self.system = system
self.serial_number = system.serial
self.serial_number = system["serial"]
self.api = api
self.status = self.api.state_manager.get_status(self.serial_number)
self.last_seen = dt_util.utcnow()

View File

@@ -1,35 +0,0 @@
"""Diagnostics support for Actron Air."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_API_TOKEN
from homeassistant.core import HomeAssistant
from .coordinator import ActronAirConfigEntry
TO_REDACT = {CONF_API_TOKEN, "master_serial", "serial_number", "serial"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: ActronAirConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinators: dict[int, Any] = {}
for idx, coordinator in enumerate(entry.runtime_data.system_coordinators.values()):
coordinators[idx] = {
"system": async_redact_data(
coordinator.system.model_dump(mode="json"), TO_REDACT
),
"status": async_redact_data(
coordinator.data.model_dump(mode="json", exclude={"last_known_state"}),
TO_REDACT,
),
}
return {
"entry_data": async_redact_data(entry.data, TO_REDACT),
"coordinators": coordinators,
}

View File

@@ -14,14 +14,10 @@ from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator
def actron_air_command[_EntityT: ActronAirEntity, **_P](
def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
"""Decorator for Actron Air API calls.
Handles ActronAirAPIError exceptions, and requests a coordinator update
to update the status of the devices as soon as possible.
"""
"""Decorate Actron Air API calls to handle ActronAirAPIError exceptions."""
@wraps(func)
async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
@@ -34,7 +30,6 @@ def actron_air_command[_EntityT: ActronAirEntity, **_P](
translation_key="api_error",
translation_placeholders={"error": str(err)},
) from err
self.coordinator.async_set_updated_data(self.coordinator.data)
return wrapper

View File

@@ -13,5 +13,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["actron-neo-api==0.5.0"]
"requirements": ["actron-neo-api==0.4.1"]
}

View File

@@ -41,7 +41,7 @@ rules:
# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: This integration uses DHCP discovery, however is cloud polling. Therefore there is no information to update.

View File

@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, actron_air_command
from .entity import ActronAirAcEntity, handle_actron_api_errors
PARALLEL_UPDATES = 0
@@ -105,12 +105,12 @@ class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
"""Return true if the switch is on."""
return self.entity_description.is_on_fn(self.coordinator)
@actron_air_command
@handle_actron_api_errors
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self.entity_description.set_fn(self.coordinator, True)
@actron_air_command
@handle_actron_api_errors
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self.entity_description.set_fn(self.coordinator, False)

View File

@@ -11,12 +11,12 @@
"user": {
"data": {
"tracked_apps": "Apps",
"tracked_custom_integrations": "Community integrations",
"tracked_custom_integrations": "Custom integrations",
"tracked_integrations": "Integrations"
},
"data_description": {
"tracked_apps": "Select the apps you want to track",
"tracked_custom_integrations": "Select the community integrations you want to track",
"tracked_custom_integrations": "Select the custom integrations you want to track",
"tracked_integrations": "Select the integrations you want to track"
}
}
@@ -31,7 +31,7 @@
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
},
"custom_integrations": {
"name": "{custom_integration_domain} (community)",
"name": "{custom_integration_domain} (custom)",
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
},
"total_active_installations": {

View File

@@ -92,7 +92,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
_LOGGER.debug("Updating statistics for the first time")
usage_sum = 0.0
last_stats_time = None
allow_update_last_stored_hour = False
else:
if not meter.readings or len(meter.readings) == 0:
_LOGGER.debug("No recent usage statistics found, skipping update")
@@ -108,7 +107,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
continue
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
_LOGGER.debug("Getting statistics at %s", start)
stats: dict[str, list[Any]] = {}
for end in (start + timedelta(seconds=1), None):
stats = await get_instance(self.hass).async_add_executor_job(
statistics_during_period,
@@ -129,28 +127,15 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
"Not found, trying to find oldest statistic after %s",
start,
)
assert stats
if not stats or not stats.get(usage_statistic_id):
_LOGGER.debug(
"Could not find existing statistics during period lookup for %s, "
"falling back to last stored statistic",
usage_statistic_id,
)
allow_update_last_stored_hour = True
last_records = last_stat[usage_statistic_id]
usage_sum = float(last_records[0].get("sum") or 0.0)
last_stats_time = last_records[0]["start"]
else:
allow_update_last_stored_hour = False
records = stats[usage_statistic_id]
def _safe_get_sum(records: list[Any]) -> float:
if records and "sum" in records[0]:
return float(records[0]["sum"])
return 0.0
def _safe_get_sum(records: list[Any]) -> float:
if records and "sum" in records[0]:
return float(records[0]["sum"])
return 0.0
usage_sum = _safe_get_sum(records)
last_stats_time = records[0]["start"]
usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
last_stats_time = stats[usage_statistic_id][0]["start"]
usage_statistics = []
@@ -163,13 +148,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
)
continue
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
if last_stats_time is not None and (
start.timestamp() < last_stats_time
or (
start.timestamp() == last_stats_time
and not allow_update_last_stored_hour
)
):
if last_stats_time is not None and start.timestamp() <= last_stats_time:
continue
usage_state = max(0, read["consumption"] / 1000)
usage_sum = max(0, read["read"])

View File

@@ -48,12 +48,10 @@ from .const import (
CONF_CODE_EXECUTION,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_PROMPT_CACHING,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
CONF_TOOL_SEARCH,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
@@ -67,9 +65,7 @@ from .const import (
DOMAIN,
NON_ADAPTIVE_THINKING_MODELS,
NON_THINKING_MODELS,
TOOL_SEARCH_UNSUPPORTED_MODELS,
WEB_SEARCH_UNSUPPORTED_MODELS,
PromptCaching,
)
if TYPE_CHECKING:
@@ -360,16 +356,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
CONF_TEMPERATURE,
default=DEFAULT[CONF_TEMPERATURE],
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_PROMPT_CACHING,
default=DEFAULT[CONF_PROMPT_CACHING],
): SelectSelector(
SelectSelectorConfig(
options=[x.value for x in PromptCaching],
translation_key=CONF_PROMPT_CACHING,
mode=SelectSelectorMode.DROPDOWN,
)
),
}
if user_input is not None:
@@ -468,16 +454,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
if not model.startswith(tuple(TOOL_SEARCH_UNSUPPORTED_MODELS)):
step_schema[
vol.Optional(
CONF_TOOL_SEARCH,
default=DEFAULT[CONF_TOOL_SEARCH],
)
] = bool
else:
self.options.pop(CONF_TOOL_SEARCH, None)
if not step_schema:
user_input = {}

View File

@@ -1,6 +1,5 @@
"""Constants for the Anthropic integration."""
from enum import StrEnum
import logging
DOMAIN = "anthropic"
@@ -14,11 +13,9 @@ CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
CONF_CODE_EXECUTION = "code_execution"
CONF_MAX_TOKENS = "max_tokens"
CONF_PROMPT_CACHING = "prompt_caching"
CONF_TEMPERATURE = "temperature"
CONF_THINKING_BUDGET = "thinking_budget"
CONF_THINKING_EFFORT = "thinking_effort"
CONF_TOOL_SEARCH = "tool_search"
CONF_WEB_SEARCH = "web_search"
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
@@ -27,31 +24,20 @@ CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"
class PromptCaching(StrEnum):
"""Prompt caching options."""
OFF = "off"
PROMPT = "prompt"
AUTOMATIC = "automatic"
MIN_THINKING_BUDGET = 1024
DEFAULT = {
CONF_CHAT_MODEL: "claude-haiku-4-5",
CONF_CODE_EXECUTION: False,
CONF_MAX_TOKENS: 3000,
CONF_PROMPT_CACHING: PromptCaching.PROMPT.value,
CONF_TEMPERATURE: 1.0,
CONF_THINKING_BUDGET: MIN_THINKING_BUDGET,
CONF_THINKING_BUDGET: 0,
CONF_THINKING_EFFORT: "low",
CONF_TOOL_SEARCH: False,
CONF_WEB_SEARCH: False,
CONF_WEB_SEARCH_USER_LOCATION: False,
CONF_WEB_SEARCH_MAX_USES: 5,
}
MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = [
"claude-3-haiku",
]
@@ -95,11 +81,6 @@ PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS = [
"claude-3-haiku",
]
TOOL_SEARCH_UNSUPPORTED_MODELS = [
"claude-3",
"claude-haiku",
]
DEPRECATED_MODELS = [
"claude-3",
]

View File

@@ -58,8 +58,6 @@ from anthropic.types import (
ToolChoiceAutoParam,
ToolChoiceToolParam,
ToolParam,
ToolSearchToolBm25_20251119Param,
ToolSearchToolResultBlock,
ToolUnionParam,
ToolUseBlock,
ToolUseBlockParam,
@@ -76,9 +74,6 @@ from anthropic.types.message_create_params import MessageCreateParamsStreaming
from anthropic.types.text_editor_code_execution_tool_result_block_param import (
Content as TextEditorCodeExecutionToolResultBlockParamContentParam,
)
from anthropic.types.tool_search_tool_result_block_param import (
Content as ToolSearchToolResultBlockParamContentParam,
)
import voluptuous as vol
from voluptuous_openapi import convert
@@ -96,11 +91,9 @@ from .const import (
CONF_CHAT_MODEL,
CONF_CODE_EXECUTION,
CONF_MAX_TOKENS,
CONF_PROMPT_CACHING,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
CONF_TOOL_SEARCH,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
@@ -116,7 +109,6 @@ from .const import (
NON_THINKING_MODELS,
PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS,
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
PromptCaching,
)
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator
@@ -210,7 +202,7 @@ class ContentDetails:
]
def _convert_content( # noqa: C901
def _convert_content(
chat_content: Iterable[conversation.Content],
) -> tuple[list[MessageParam], str | None]:
"""Transform HA chat_log content into Anthropic API format."""
@@ -263,15 +255,6 @@ def _convert_content( # noqa: C901
content.tool_result,
),
}
elif content.tool_name == "tool_search":
tool_result_block = {
"type": "tool_search_tool_result",
"tool_use_id": content.tool_call_id,
"content": cast(
ToolSearchToolResultBlockParamContentParam,
content.tool_result,
),
}
else:
tool_result_block = {
"type": "tool_result",
@@ -402,7 +385,6 @@ def _convert_content( # noqa: C901
"code_execution",
"bash_code_execution",
"text_editor_code_execution",
"tool_search_tool_bm25",
],
tool_call.tool_name,
),
@@ -415,7 +397,6 @@ def _convert_content( # noqa: C901
"code_execution",
"bash_code_execution",
"text_editor_code_execution",
"tool_search_tool_bm25",
]
else ToolUseBlockParam(
type="tool_use",
@@ -577,7 +558,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
CodeExecutionToolResultBlock,
BashCodeExecutionToolResultBlock,
TextEditorCodeExecutionToolResultBlock,
ToolSearchToolResultBlock,
),
):
if content_details:
@@ -698,7 +678,7 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
entry_type=dr.DeviceEntryType.SERVICE,
)
async def _async_handle_chat_log( # noqa: C901
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure_name: str | None = None,
@@ -708,20 +688,21 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
"""Generate an answer for the chat log."""
options = self.subentry.data
preloaded_tools = [
"HassTurnOn",
"HassTurnOff",
"GetLiveContext",
"code_execution",
"web_search",
]
system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent):
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="system_message_not_found"
)
# System prompt with caching enabled
system_prompt: list[TextBlockParam] = [
TextBlockParam(
type="text",
text=system.content,
cache_control={"type": "ephemeral"},
)
]
messages, container_id = _convert_content(chat_log.content[1:])
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
@@ -730,28 +711,11 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
system=system.content,
system=system_prompt,
stream=True,
container=container_id,
)
if (
options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
== PromptCaching.PROMPT
):
model_args["system"] = [
{
"type": "text",
"text": system.content,
"cache_control": {"type": "ephemeral"},
}
]
elif (
options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
== PromptCaching.AUTOMATIC
):
model_args["cache_control"] = {"type": "ephemeral"}
if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
thinking_effort = options.get(
CONF_THINKING_EFFORT, DEFAULT[CONF_THINKING_EFFORT]
@@ -910,23 +874,8 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
),
)
)
preloaded_tools.append(structure_name)
if tools:
if (
options.get(CONF_TOOL_SEARCH, DEFAULT[CONF_TOOL_SEARCH])
and len(tools) > len(preloaded_tools) + 1
):
for tool in tools:
if not tool["name"].endswith(tuple(preloaded_tools)):
tool["defer_loading"] = True
tools.append(
ToolSearchToolBm25_20251119Param(
type="tool_search_tool_bm25_20251119",
name="tool_search_tool_bm25",
)
)
model_args["tools"] = tools
coordinator = self.entry.runtime_data
@@ -970,7 +919,6 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
except anthropic.AnthropicError as err:
# Non-connection error, mark connection as healthy
coordinator.async_set_updated_data(None)
LOGGER.error("Error while talking to Anthropic: %s", err)
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",

View File

@@ -8,6 +8,6 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["anthropic==0.83.0"]
}

View File

@@ -47,13 +47,11 @@
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::prompt_caching%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"data_description": {
"chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
"prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::prompt_caching%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
@@ -74,7 +72,6 @@
"code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data::code_execution%]",
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_effort%]",
"tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::tool_search%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
@@ -83,7 +80,6 @@
"code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::code_execution%]",
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_effort%]",
"tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::tool_search%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
@@ -107,13 +103,11 @@
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"prompt_caching": "Caching strategy",
"temperature": "Temperature"
},
"data_description": {
"chat_model": "The model to serve the responses.",
"max_tokens": "Limit the number of response tokens.",
"prompt_caching": "Optimize your API cost and response times based on your usage.",
"temperature": "Control the randomness of the response, trading off between creativity and coherence."
},
"title": "Advanced settings"
@@ -138,7 +132,6 @@
"code_execution": "Code execution",
"thinking_budget": "Thinking budget",
"thinking_effort": "Thinking effort",
"tool_search": "Enable tool search tool",
"user_location": "Include home location",
"web_search": "Enable web search",
"web_search_max_uses": "Maximum web searches"
@@ -147,7 +140,6 @@
"code_execution": "Allow the model to execute code in a secure sandbox environment, enabling it to analyze data and perform complex calculations.",
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
"thinking_effort": "Control how many tokens Claude uses when responding, trading off between response thoroughness and token efficiency",
"tool_search": "Enable dynamic tool discovery instead of preloading all tools into the context",
"user_location": "Localize search results based on home location",
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
"web_search_max_uses": "Limit the number of searches performed per response"
@@ -218,13 +210,6 @@
}
},
"selector": {
"prompt_caching": {
"options": {
"automatic": "Full",
"off": "Disabled",
"prompt": "System prompt"
}
},
"thinking_effort": {
"options": {
"high": "[%key:common::state::high%]",

View File

@@ -29,7 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"requirements": ["axis==68"],
"requirements": ["axis==67"],
"ssdp": [
{
"manufacturer": "AXIS"

View File

@@ -74,12 +74,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) ->
translation_domain=DOMAIN,
translation_key="invalid_bucket_name",
) from err
except exception.BadRequest as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="bad_request",
translation_placeholders={"error_message": str(err)},
) from err
except (
exception.B2ConnectionError,
exception.B2RequestTimeout,

View File

@@ -174,14 +174,6 @@ class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
)
errors[CONF_BUCKET] = "invalid_bucket_name"
except exception.BadRequest as err:
_LOGGER.error(
"Backblaze B2 API rejected the request for Key ID '%s': %s",
user_input[CONF_KEY_ID],
err,
)
errors["base"] = "bad_request"
placeholders["error_message"] = str(err)
except (
exception.B2ConnectionError,
exception.B2RequestTimeout,

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["b2sdk"],
"quality_scale": "bronze",
"requirements": ["b2sdk==2.10.4"]
"requirements": ["b2sdk==2.10.1"]
}

View File

@@ -6,7 +6,6 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"bad_request": "The Backblaze B2 API rejected the request: {error_message}",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
"invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
@@ -61,9 +60,6 @@
}
},
"exceptions": {
"bad_request": {
"message": "The Backblaze B2 API rejected the request: {error_message}"
},
"cannot_connect": {
"message": "Cannot connect to endpoint"
},

View File

@@ -23,7 +23,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
from .models import AgentBackup, BackupNotFound, InvalidBackupFilename
from .models import AgentBackup, BackupNotFound
@callback
@@ -195,11 +195,6 @@ class UploadBackupView(HomeAssistantView):
backup_id = await manager.async_receive_backup(
contents=contents, agent_ids=agent_ids
)
except InvalidBackupFilename as err:
return Response(
body=str(err),
status=HTTPStatus.BAD_REQUEST,
)
except OSError as err:
return Response(
body=f"Can't write backup file: {err}",

View File

@@ -68,7 +68,6 @@ from .models import (
BackupReaderWriterError,
BaseBackup,
Folder,
InvalidBackupFilename,
)
from .store import BackupStore
from .util import (
@@ -1007,14 +1006,6 @@ class BackupManager:
) -> str:
"""Receive and store a backup file from upload."""
contents.chunk_size = BUF_SIZE
suggested_filename = contents.filename or "backup.tar"
safe_filename = PureWindowsPath(suggested_filename).name
if (
not safe_filename
or safe_filename != suggested_filename
or safe_filename == ".."
):
raise InvalidBackupFilename(f"Invalid filename: {suggested_filename}")
self.async_on_backup_event(
ReceiveBackupEvent(
reason=None,
@@ -1025,7 +1016,7 @@ class BackupManager:
written_backup = await self._reader_writer.async_receive_backup(
agent_ids=agent_ids,
stream=contents,
suggested_filename=suggested_filename,
suggested_filename=contents.filename or "backup.tar",
)
self.async_on_backup_event(
ReceiveBackupEvent(
@@ -1966,7 +1957,10 @@ class CoreBackupReaderWriter(BackupReaderWriter):
suggested_filename: str,
) -> WrittenBackup:
"""Receive a backup."""
temp_file = Path(self.temp_backup_dir, suggested_filename)
safe_filename = PureWindowsPath(suggested_filename).name
if not safe_filename or safe_filename == "..":
safe_filename = "backup.tar"
temp_file = Path(self.temp_backup_dir, safe_filename)
async_add_executor_job = self._hass.async_add_executor_job
await async_add_executor_job(make_backup_dir, self.temp_backup_dir)

View File

@@ -8,6 +8,6 @@
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.7", "securetar==2026.4.1"],
"requirements": ["cronsim==2.7", "securetar==2026.2.0"],
"single_config_entry": true
}

View File

@@ -95,12 +95,6 @@ class BackupReaderWriterError(BackupError):
error_code = "backup_reader_writer_error"
class InvalidBackupFilename(BackupManagerError):
"""Raised when a backup filename is invalid."""
error_code = "invalid_backup_filename"
class BackupNotFound(BackupAgentError, BackupManagerError):
"""Raised when a backup is not found."""

View File

@@ -22,7 +22,6 @@ from securetar import (
SecureTarFile,
SecureTarReadError,
SecureTarRootKeyContext,
get_archive_max_ciphertext_size,
)
from homeassistant.core import HomeAssistant
@@ -384,12 +383,9 @@ def _encrypt_backup(
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
continue
if (fileobj := input_tar.extractfile(obj)) is None:
LOGGER.debug(
"Non regular inner tar file %s will not be encrypted", obj.name
)
continue
output_archive.import_tar(fileobj, obj, derived_key_id=inner_tar_idx)
output_archive.import_tar(
input_tar.extractfile(obj), obj, derived_key_id=inner_tar_idx
)
inner_tar_idx += 1
@@ -423,7 +419,7 @@ class _CipherBackupStreamer:
hass: HomeAssistant,
backup: AgentBackup,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
password: str,
password: str | None,
) -> None:
"""Initialize."""
self._workers: list[_CipherWorkerStatus] = []
@@ -435,9 +431,7 @@ class _CipherBackupStreamer:
def size(self) -> int:
"""Return the maximum size of the decrypted or encrypted backup."""
return get_archive_max_ciphertext_size(
self._backup.size, SECURETAR_CREATE_VERSION, self._num_tar_files()
)
return self._backup.size + self._num_tar_files() * tarfile.RECORDSIZE
def _num_tar_files(self) -> int:
"""Return the number of inner tar files."""

View File

@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==4.0.4",
"habluetooth==6.0.0"
"dbus-fast==3.1.2",
"habluetooth==5.11.1"
]
}

View File

@@ -1,7 +1,7 @@
{
"issues": {
"integration_removed": {
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a [community integration]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a community-developed [custom component]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
"title": "The BMW Connected Drive integration has been removed"
}
}

View File

@@ -10,7 +10,6 @@ from bsblan import (
BSBLAN,
BSBLANAuthError,
BSBLANConnectionError,
BSBLANError,
HotWaterConfig,
HotWaterSchedule,
HotWaterState,
@@ -51,7 +50,7 @@ class BSBLanFastData:
state: State
sensor: Sensor
dhw: HotWaterState | None = None
dhw: HotWaterState
@dataclass
@@ -112,6 +111,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
# This reduces response time significantly (~0.2s per parameter)
state = await self.client.state(include=STATE_INCLUDE)
sensor = await self.client.sensor(include=SENSOR_INCLUDE)
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
@@ -126,19 +126,6 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
translation_placeholders={"host": host},
) from err
# Fetch DHW state separately - device may not support hot water
dhw: HotWaterState | None = None
try:
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
except BSBLANError:
# Preserve last known DHW state if available (entity may depend on it)
if self.data:
dhw = self.data.dhw
LOGGER.debug(
"DHW (Domestic Hot Water) state not available on device at %s",
self.config_entry.data[CONF_HOST],
)
return BSBLanFastData(
state=state,
sensor=sensor,
@@ -172,6 +159,13 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
dhw_schedule = await self.client.hot_water_schedule()
except AttributeError:
# Device does not support DHW functionality
LOGGER.debug(
"DHW (Domestic Hot Water) not available on device at %s",
self.config_entry.data[CONF_HOST],
)
return BSBLanSlowData()
except (BSBLANConnectionError, BSBLANAuthError) as err:
# If config update fails, keep existing data
LOGGER.debug(
@@ -183,13 +177,6 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
return self.data
# First fetch failed, return empty data
return BSBLanSlowData()
except BSBLANError, AttributeError:
# Device does not support DHW functionality
LOGGER.debug(
"DHW (Domestic Hot Water) not available on device at %s",
self.config_entry.data[CONF_HOST],
)
return BSBLanSlowData()
return BSBLanSlowData(
dhw_config=dhw_config,

View File

@@ -22,9 +22,7 @@ async def async_get_config_entry_diagnostics(
"fast_coordinator_data": {
"state": data.fast_coordinator.data.state.model_dump(),
"sensor": data.fast_coordinator.data.sensor.model_dump(),
"dhw": data.fast_coordinator.data.dhw.model_dump()
if data.fast_coordinator.data.dhw
else None,
"dhw": data.fast_coordinator.data.dhw.model_dump(),
},
"static": data.static.model_dump() if data.static is not None else None,
}

View File

@@ -2,9 +2,6 @@
from __future__ import annotations
from yarl import URL
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
@@ -13,7 +10,7 @@ from homeassistant.helpers.device_registry import (
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import BSBLanData
from .const import DEFAULT_PORT, DOMAIN
from .const import DOMAIN
from .coordinator import BSBLanCoordinator, BSBLanFastCoordinator, BSBLanSlowCoordinator
@@ -25,8 +22,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
def __init__(self, coordinator: _T, data: BSBLanData) -> None:
"""Initialize BSBLan entity with device info."""
super().__init__(coordinator)
host = coordinator.config_entry.data[CONF_HOST]
port = coordinator.config_entry.data.get(CONF_PORT, DEFAULT_PORT)
host = coordinator.config_entry.data["host"]
mac = data.device.MAC
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, mac)},
@@ -48,7 +44,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
else None
),
sw_version=data.device.version,
configuration_url=str(URL.build(scheme="http", host=host, port=port)),
configuration_url=f"http://{host}",
)

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Any
from bsblan import BSBLANError, HotWaterState, SetHotWaterParam
from bsblan import BSBLANError, SetHotWaterParam
from homeassistant.components.water_heater import (
STATE_ECO,
@@ -46,10 +46,8 @@ async def async_setup_entry(
data = entry.runtime_data
# Only create water heater entity if DHW (Domestic Hot Water) is available
# Check if we have any DHW-related data indicating water heater support
dhw_data = data.fast_coordinator.data.dhw
if dhw_data is None:
# Device does not support DHW, skip water heater setup
return
if (
dhw_data.operating_mode is None
and dhw_data.nominal_setpoint is None
@@ -109,21 +107,11 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
else:
self._attr_max_temp = 65.0 # Default maximum
@property
def _dhw(self) -> HotWaterState:
"""Return DHW state data.
This entity is only created when DHW data is available.
"""
dhw = self.coordinator.data.dhw
assert dhw is not None
return dhw
@property
def current_operation(self) -> str | None:
"""Return current operation."""
if (
operating_mode := self._dhw.operating_mode
operating_mode := self.coordinator.data.dhw.operating_mode
) is None or operating_mode.value is None:
return None
return BSBLAN_TO_HA_OPERATION_MODE.get(operating_mode.value)
@@ -131,14 +119,16 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if (current_temp := self._dhw.dhw_actual_value_top_temperature) is None:
if (
current_temp := self.coordinator.data.dhw.dhw_actual_value_top_temperature
) is None:
return None
return current_temp.value
@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
if (target_temp := self._dhw.nominal_setpoint) is None:
if (target_temp := self.coordinator.data.dhw.nominal_setpoint) is None:
return None
return target_temp.value

View File

@@ -17,7 +17,6 @@ import voluptuous as vol
from homeassistant.components import frontend, http, websocket_api
from homeassistant.components.websocket_api import (
ERR_INVALID_FORMAT,
ERR_NOT_FOUND,
ERR_NOT_SUPPORTED,
ActiveConnection,
@@ -34,7 +33,6 @@ from homeassistant.core import (
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_point_in_time
@@ -78,7 +76,6 @@ ENTITY_ID_FORMAT = DOMAIN + ".{}"
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
SCAN_INTERVAL = datetime.timedelta(seconds=60)
EVENT_LISTENER_DEBOUNCE_COOLDOWN = 1.0 # seconds
# Don't support rrules more often than daily
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}
@@ -323,7 +320,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete)
websocket_api.async_register_command(hass, handle_calendar_event_update)
websocket_api.async_register_command(hass, handle_calendar_event_subscribe)
component.async_register_entity_service(
CREATE_EVENT_SERVICE,
@@ -521,17 +517,6 @@ class CalendarEntity(Entity):
_entity_component_unrecorded_attributes = frozenset({"description"})
_alarm_unsubs: list[CALLBACK_TYPE] | None = None
_event_listeners: (
list[
tuple[
datetime.datetime,
datetime.datetime,
Callable[[list[JsonValueType] | None], None],
]
]
| None
) = None
_event_listener_debouncer: Debouncer[None] | None = None
_attr_initial_color: str | None
@@ -600,10 +585,6 @@ class CalendarEntity(Entity):
the current or upcoming event.
"""
super()._async_write_ha_state()
# Notify websocket subscribers of event changes (debounced)
if self._event_listeners and self._event_listener_debouncer:
self._event_listener_debouncer.async_schedule_call()
if self._alarm_unsubs is None:
self._alarm_unsubs = []
_LOGGER.debug(
@@ -644,13 +625,6 @@ class CalendarEntity(Entity):
event.end_datetime_local,
)
@callback
def _async_cancel_event_listener_debouncer(self) -> None:
"""Cancel and clear the event listener debouncer."""
if self._event_listener_debouncer:
self._event_listener_debouncer.async_cancel()
self._event_listener_debouncer = None
async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass.
@@ -659,90 +633,6 @@ class CalendarEntity(Entity):
for unsub in self._alarm_unsubs or ():
unsub()
self._alarm_unsubs = None
self._async_cancel_event_listener_debouncer()
@final
@callback
def async_subscribe_events(
self,
start_date: datetime.datetime,
end_date: datetime.datetime,
event_listener: Callable[[list[JsonValueType] | None], None],
) -> CALLBACK_TYPE:
"""Subscribe to calendar event updates.
Called by websocket API.
"""
if self._event_listeners is None:
self._event_listeners = []
if self._event_listener_debouncer is None:
self._event_listener_debouncer = Debouncer(
self.hass,
_LOGGER,
cooldown=EVENT_LISTENER_DEBOUNCE_COOLDOWN,
immediate=True,
function=self.async_update_event_listeners,
)
listener_data = (start_date, end_date, event_listener)
self._event_listeners.append(listener_data)
@callback
def unsubscribe() -> None:
if self._event_listeners:
self._event_listeners.remove(listener_data)
if not self._event_listeners:
self._async_cancel_event_listener_debouncer()
return unsubscribe
@final
@callback
def async_update_event_listeners(self) -> None:
"""Push updated calendar events to all listeners."""
if not self._event_listeners:
return
for start_date, end_date, listener in self._event_listeners:
self.async_update_single_event_listener(start_date, end_date, listener)
@final
@callback
def async_update_single_event_listener(
self,
start_date: datetime.datetime,
end_date: datetime.datetime,
listener: Callable[[list[JsonValueType] | None], None],
) -> None:
"""Schedule an event fetch and push to a single listener."""
self.hass.async_create_task(
self._async_update_listener(start_date, end_date, listener)
)
async def _async_update_listener(
self,
start_date: datetime.datetime,
end_date: datetime.datetime,
listener: Callable[[list[JsonValueType] | None], None],
) -> None:
"""Fetch events and push to a single listener."""
try:
events = await self.async_get_events(self.hass, start_date, end_date)
except HomeAssistantError as err:
_LOGGER.debug(
"Error fetching calendar events for %s: %s",
self.entity_id,
err,
)
listener(None)
return
event_list: list[JsonValueType] = [
dataclasses.asdict(event, dict_factory=_list_events_dict_factory)
for event in events
]
listener(event_list)
async def async_get_events(
self,
@@ -977,65 +867,6 @@ async def handle_calendar_event_update(
connection.send_result(msg["id"])
@websocket_api.websocket_command(
{
vol.Required("type"): "calendar/event/subscribe",
vol.Required("entity_id"): cv.entity_domain(DOMAIN),
vol.Required("start"): cv.datetime,
vol.Required("end"): cv.datetime,
}
)
@websocket_api.async_response
async def handle_calendar_event_subscribe(
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
"""Subscribe to calendar event updates."""
entity_id: str = msg["entity_id"]
if not (entity := hass.data[DATA_COMPONENT].get_entity(entity_id)):
connection.send_error(
msg["id"],
ERR_NOT_FOUND,
f"Calendar entity not found: {entity_id}",
)
return
start_date = dt_util.as_local(msg["start"])
end_date = dt_util.as_local(msg["end"])
if start_date >= end_date:
connection.send_error(
msg["id"],
ERR_INVALID_FORMAT,
"Start must be before end",
)
return
subscription_id = msg["id"]
@callback
def event_listener(events: list[JsonValueType] | None) -> None:
"""Push updated calendar events to websocket."""
if subscription_id not in connection.subscriptions:
return
connection.send_message(
websocket_api.event_message(
subscription_id,
{
"events": events,
},
)
)
connection.subscriptions[subscription_id] = entity.async_subscribe_events(
start_date, end_date, event_listener
)
connection.send_result(subscription_id)
# Push initial events only to the new subscriber
entity.async_update_single_event_listener(start_date, end_date, event_listener)
def _validate_timespan(
values: dict[str, Any],
) -> tuple[datetime.datetime | datetime.date, datetime.datetime | datetime.date]:

View File

@@ -51,24 +51,18 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: Each config entry represents a single device.
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-disabled-by-default: todo
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow:
status: exempt
comment: No user-configurable settings in the configuration flow.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: Integration does not register repair issues.
stale-devices:
status: exempt
comment: Each config entry represents a single device.
stale-devices: todo
# Platinum
async-dependency: done

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from datetime import datetime, timedelta
from pycasperglow import GlowState
from homeassistant.components.sensor import (
@@ -15,8 +13,6 @@ from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.dt import utcnow
from homeassistant.util.variance import ignore_variance
from .coordinator import CasperGlowConfigEntry, CasperGlowCoordinator
from .entity import CasperGlowEntity
@@ -30,12 +26,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform for Casper Glow."""
async_add_entities(
[
CasperGlowBatterySensor(entry.runtime_data),
CasperGlowDimmingEndTimeSensor(entry.runtime_data),
]
)
async_add_entities([CasperGlowBatterySensor(entry.runtime_data)])
class CasperGlowBatterySensor(CasperGlowEntity, SensorEntity):
@@ -68,67 +59,3 @@ class CasperGlowBatterySensor(CasperGlowEntity, SensorEntity):
if new_value != self._attr_native_value:
self._attr_native_value = new_value
self.async_write_ha_state()
class CasperGlowDimmingEndTimeSensor(CasperGlowEntity, SensorEntity):
"""Sensor entity for Casper Glow dimming end time."""
_attr_translation_key = "dimming_end_time"
_attr_device_class = SensorDeviceClass.TIMESTAMP
_attr_entity_registry_enabled_default = False
def __init__(self, coordinator: CasperGlowCoordinator) -> None:
"""Initialize the dimming end time sensor."""
super().__init__(coordinator)
self._attr_unique_id = (
f"{format_mac(coordinator.device.address)}_dimming_end_time"
)
self._is_paused = False
self._projected_end_time = ignore_variance(
self._calculate_end_time,
timedelta(minutes=1, seconds=30),
)
self._update_from_state(coordinator.device.state)
@staticmethod
def _calculate_end_time(remaining_ms: int) -> datetime:
"""Calculate projected dimming end time from remaining milliseconds."""
return utcnow() + timedelta(milliseconds=remaining_ms)
async def async_added_to_hass(self) -> None:
"""Register state update callback when entity is added."""
await super().async_added_to_hass()
self.async_on_remove(
self._device.register_callback(self._async_handle_state_update)
)
def _reset_projected_end_time(self) -> None:
"""Clear the projected end time and reset the variance filter."""
self._attr_native_value = None
self._projected_end_time = ignore_variance(
self._calculate_end_time,
timedelta(minutes=1, seconds=30),
)
@callback
def _update_from_state(self, state: GlowState) -> None:
"""Update entity attributes from device state."""
if state.is_paused is not None:
self._is_paused = state.is_paused
if self._is_paused:
self._reset_projected_end_time()
return
remaining_ms = state.dimming_time_remaining_ms
if not remaining_ms:
if remaining_ms == 0 or state.is_on is False:
self._reset_projected_end_time()
return
self._attr_native_value = self._projected_end_time(remaining_ms)
@callback
def _async_handle_state_update(self, state: GlowState) -> None:
"""Handle a state update from the device."""
self._update_from_state(state)
self.async_write_ha_state()

View File

@@ -44,11 +44,6 @@
"dimming_time": {
"name": "Dimming time"
}
},
"sensor": {
"dimming_end_time": {
"name": "Dimming end time"
}
}
},
"exceptions": {

View File

@@ -44,10 +44,10 @@
},
"services": {
"show_lovelace_view": {
"description": "Shows a dashboard view on a Google Cast device.",
"description": "Shows a dashboard view on a Chromecast device.",
"fields": {
"dashboard_path": {
"description": "The URL path of the dashboard to show, defaults to `lovelace` if not specified.",
"description": "The URL path of the dashboard to show, defaults to lovelace if not specified.",
"name": "Dashboard path"
},
"entity_id": {
@@ -59,7 +59,7 @@
"name": "View path"
}
},
"name": "Show dashboard view via Google Cast"
"name": "Show dashboard view"
}
}
}

View File

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"requirements": ["aiocomelit==2.0.2"]
"requirements": ["aiocomelit==2.0.1"]
}

View File

@@ -6,17 +6,25 @@ rules:
appropriate-polling:
status: exempt
comment: The integration uses a push-based mechanism with a background sync task, not polling.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
brands:
status: done
common-modules:
status: done
config-flow-test-coverage:
status: done
config-flow:
status: done
dependency-transparency:
status: done
docs-actions:
status: exempt
comment: The integration does not expose any custom service actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-high-level-description:
status: done
docs-installation-instructions:
status: done
docs-removal-instructions:
status: done
entity-event-setup:
status: exempt
comment: This integration does not create its own entities.
@@ -26,30 +34,40 @@ rules:
has-entity-name:
status: exempt
comment: This integration does not create its own entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
runtime-data:
status: done
test-before-configure:
status: done
test-before-setup:
status: done
unique-config-entry:
status: done
# Silver
action-exceptions:
status: exempt
comment: The integration does not expose any custom service actions.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
config-entry-unloading:
status: done
docs-configuration-parameters:
status: done
docs-installation-parameters:
status: done
entity-unavailable:
status: exempt
comment: This integration does not create its own entities.
integration-owner: done
integration-owner:
status: done
log-when-unavailable:
status: done
comment: The integration logs a single message when the EnergyID service is unavailable.
parallel-updates:
status: exempt
comment: This integration does not create its own entities.
reauthentication-flow: done
test-coverage: done
reauthentication-flow:
status: done
test-coverage:
status: done
# Gold
devices:
@@ -64,15 +82,21 @@ rules:
discovery-update-info:
status: exempt
comment: No discovery mechanism is used.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-data-update:
status: done
docs-examples:
status: done
docs-known-limitations:
status: done
docs-supported-devices:
status: exempt
comment: This is a service integration not tied to specific device models.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
docs-supported-functions:
status: done
docs-troubleshooting:
status: done
docs-use-cases:
status: done
dynamic-devices:
status: exempt
comment: The integration creates a single device entry for the service connection.
@@ -88,7 +112,8 @@ rules:
entity-translations:
status: exempt
comment: This integration does not create its own entities.
exception-translations: done
exception-translations:
status: done
icon-translations:
status: exempt
comment: This integration does not create its own entities.
@@ -103,8 +128,10 @@ rules:
comment: Creates a single service device entry tied to the config entry.
# Platinum
async-dependency: done
inject-websession: done
async-dependency:
status: done
inject-websession:
status: done
strict-typing:
status: todo
comment: Full strict typing compliance will be addressed in a future update.

View File

@@ -19,7 +19,7 @@ from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -79,13 +79,13 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise UpdateFailed(
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise UpdateFailed(
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},

View File

@@ -7,14 +7,13 @@ from typing import Any, cast
from fitbit import Fitbit
from fitbit.exceptions import HTTPException, HTTPUnauthorized
from fitbit_web_api import ApiClient, Configuration, DevicesApi, UserApi
from fitbit_web_api import ApiClient, Configuration, DevicesApi
from fitbit_web_api.exceptions import (
ApiException,
OpenApiException,
UnauthorizedException,
)
from fitbit_web_api.models.device import Device
from fitbit_web_api.models.user import User
from requests.exceptions import ConnectionError as RequestsConnectionError
from homeassistant.const import CONF_ACCESS_TOKEN
@@ -25,6 +24,7 @@ from homeassistant.util.unit_system import METRIC_SYSTEM
from .const import FitbitUnitSystem
from .exceptions import FitbitApiException, FitbitAuthException
from .model import FitbitProfile
_LOGGER = logging.getLogger(__name__)
@@ -46,7 +46,7 @@ class FitbitApi(ABC):
) -> None:
"""Initialize Fitbit auth."""
self._hass = hass
self._profile: User | None = None
self._profile: FitbitProfile | None = None
self._unit_system = unit_system
@abstractmethod
@@ -74,16 +74,18 @@ class FitbitApi(ABC):
configuration.access_token = token[CONF_ACCESS_TOKEN]
return await self._hass.async_add_executor_job(ApiClient, configuration)
async def async_get_user_profile(self) -> User:
async def async_get_user_profile(self) -> FitbitProfile:
"""Return the user profile from the API."""
if self._profile is None:
client = await self._async_get_fitbit_web_api()
api = UserApi(client)
api_response = await self._run_async(api.get_profile)
if not api_response.user:
raise FitbitApiException("No user profile returned from fitbit API")
_LOGGER.debug("user_profile_get=%s", api_response.to_dict())
self._profile = api_response.user
client = await self._async_get_client()
response: dict[str, Any] = await self._run(client.user_profile_get)
_LOGGER.debug("user_profile_get=%s", response)
profile = response["user"]
self._profile = FitbitProfile(
encoded_id=profile["encodedId"],
display_name=profile["displayName"],
locale=profile.get("locale"),
)
return self._profile
async def async_get_unit_system(self) -> FitbitUnitSystem:

View File

@@ -85,6 +85,4 @@ class OAuth2FlowHandler(
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=profile.display_name or "Fitbit", data=data
)
return self.async_create_entry(title=profile.display_name, data=data)

View File

@@ -7,6 +7,20 @@ from typing import Any
from .const import CONF_CLOCK_FORMAT, CONF_MONITORED_RESOURCES, FitbitScope
@dataclass
class FitbitProfile:
"""User profile from the Fitbit API response."""
encoded_id: str
"""The ID representing the Fitbit user."""
display_name: str
"""The name shown when the user's friends look at their Fitbit profile."""
locale: str | None
"""The locale defined in the user's Fitbit account settings."""
@dataclass
class FitbitConfig:
"""Information from the fitbit ConfigEntry data."""

View File

@@ -25,7 +25,6 @@ from homeassistant.const import (
UnitOfVolume,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.icon import icon_for_battery_level
@@ -537,8 +536,6 @@ async def async_setup_entry(
# These are run serially to reuse the cached user profile, not gathered
# to avoid two racing requests.
user_profile = await api.async_get_user_profile()
if user_profile.encoded_id is None:
raise ConfigEntryNotReady("Could not get user profile")
unit_system = await api.async_get_unit_system()
fitbit_config = config_from_entry_data(entry.data)

View File

@@ -2,14 +2,18 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from .coordinator import FlussConfigEntry, FlussDataUpdateCoordinator
from .coordinator import FlussDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.BUTTON]
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
async def async_setup_entry(
hass: HomeAssistant,
entry: FlussConfigEntry,

View File

@@ -1,13 +1,16 @@
"""Support for Fluss Devices."""
from homeassistant.components.button import ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FlussApiClientError, FlussConfigEntry
from .coordinator import FlussApiClientError, FlussDataUpdateCoordinator
from .entity import FlussEntity
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -14,12 +14,11 @@ from homeassistant.components.button import (
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er, issue_registry as ir
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import BUTTON_TYPE_WOL, CONNECTION_TYPE_LAN, DOMAIN, MeshRoles
from .const import BUTTON_TYPE_WOL, CONNECTION_TYPE_LAN, MeshRoles
from .coordinator import FRITZ_DATA_KEY, AvmWrapper, FritzConfigEntry, FritzData
from .entity import FritzDeviceBase
from .helpers import _is_tracked
@@ -64,38 +63,10 @@ BUTTONS: Final = [
translation_key="cleanup",
entity_category=EntityCategory.CONFIG,
press_action=lambda avm_wrapper: avm_wrapper.async_trigger_cleanup(),
entity_registry_enabled_default=False,
),
]
def repair_issue_cleanup(hass: HomeAssistant, avm_wrapper: AvmWrapper) -> None:
"""Repair issue for cleanup button."""
entity_registry = er.async_get(hass)
if (
(
entity_button := entity_registry.async_get_entity_id(
"button", DOMAIN, f"{avm_wrapper.unique_id}-cleanup"
)
)
and (entity_entry := entity_registry.async_get(entity_button))
and not entity_entry.disabled
):
# Deprecate the 'cleanup' button: create a Repairs issue for users
ir.async_create_issue(
hass,
domain=DOMAIN,
issue_id="deprecated_cleanup_button",
is_fixable=False,
is_persistent=True,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_cleanup_button",
translation_placeholders={"removal_version": "2026.11.0"},
breaks_in_ha_version="2026.11.0",
)
async def async_setup_entry(
hass: HomeAssistant,
entry: FritzConfigEntry,
@@ -111,7 +82,6 @@ async def async_setup_entry(
if avm_wrapper.mesh_role == MeshRoles.SLAVE:
async_add_entities(entities_list)
repair_issue_cleanup(hass, avm_wrapper)
return
data_fritz = hass.data[FRITZ_DATA_KEY]
@@ -130,8 +100,6 @@ async def async_setup_entry(
)
)
repair_issue_cleanup(hass, avm_wrapper)
class FritzButton(ButtonEntity):
"""Defines a Fritz!Box base button."""
@@ -158,12 +126,6 @@ class FritzButton(ButtonEntity):
async def async_press(self) -> None:
"""Triggers Fritz!Box service."""
if self.entity_description.key == "cleanup":
_LOGGER.warning(
"The 'cleanup' button is deprecated and will be removed in Home Assistant Core 2026.11.0. "
"Please update your automations and dashboards to remove any usage of this button. "
"The action is now performed automatically at each data refresh",
)
await self.entity_description.press_action(self.avm_wrapper)

View File

@@ -80,5 +80,6 @@ FRITZ_EXCEPTIONS = (
FRITZ_AUTH_EXCEPTIONS = (FritzAuthorizationError, FritzSecurityError)
WIFI_STANDARD = {1: "2.4Ghz", 2: "5Ghz", 3: "5Ghz", 4: "Guest"}
CONNECTION_TYPE_LAN = "LAN"

View File

@@ -332,10 +332,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
translation_placeholders={"error": str(ex)},
) from ex
_LOGGER.debug("entity_data: %s", entity_data)
await self.async_trigger_cleanup()
_LOGGER.debug("enity_data: %s", entity_data)
return entity_data
@property
@@ -379,8 +376,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"""Return device Mac address."""
if not self._unique_id:
raise ClassSetupMissing
# Unique ID is the serial number of the device
# which is the MAC of the device without the colons
return dr.format_mac(self._unique_id)
@property

View File

@@ -8,7 +8,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"quality_scale": "gold",
"quality_scale": "silver",
"requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
"ssdp": [
{

View File

@@ -56,7 +56,9 @@ rules:
repair-issues:
status: exempt
comment: no known use cases for repair issues or flows, yet
stale-devices: done
stale-devices:
status: todo
comment: automate the current cleanup process and deprecate the corresponding button
# Platinum
async-dependency:

View File

@@ -169,18 +169,6 @@
"switch": {
"internet_access": {
"name": "Internet access"
},
"wi_fi_guest": {
"name": "Guest"
},
"wi_fi_main_2_4ghz": {
"name": "Main 2.4 GHz"
},
"wi_fi_main_5ghz": {
"name": "Main 5 GHz"
},
"wi_fi_main_5ghz_high_6ghz": {
"name": "Main 5 GHz High / 6 GHz"
}
}
},
@@ -207,12 +195,6 @@
"message": "Error while updating the data: {error}"
}
},
"issues": {
"deprecated_cleanup_button": {
"description": "The 'Cleanup' button is deprecated and will be removed in Home Assistant Core {removal_version}. Please update your automations and dashboards to remove any usage of this button. The action is now performed automatically at each data refresh.",
"title": "'Cleanup' button is deprecated"
}
},
"options": {
"step": {
"init": {

View File

@@ -9,7 +9,6 @@ from homeassistant.components.network import async_get_source_ip
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
@@ -23,8 +22,8 @@ from .const import (
SWITCH_TYPE_PORTFORWARD,
SWITCH_TYPE_PROFILE,
SWITCH_TYPE_WIFINETWORK,
WIFI_STANDARD,
MeshRoles,
Platform,
)
from .coordinator import FRITZ_DATA_KEY, AvmWrapper, FritzConfigEntry, FritzData
from .entity import FritzBoxBaseEntity
@@ -36,101 +35,6 @@ _LOGGER = logging.getLogger(__name__)
# Set a sane value to avoid too many updates
PARALLEL_UPDATES = 5
WIFI_STANDARD = {1: "2.4Ghz", 2: "5Ghz", 3: "5Ghz", 4: "Guest"}
WIFI_BAND = {
0: {"band": "2.4Ghz"},
1: {"band": "5Ghz"},
3: {"band": "5Ghz High / 6Ghz"},
}
def _wifi_naming(
network_info: dict[str, Any], wifi_index: int, wifi_count: int
) -> str | None:
"""Return a friendly name for a Wi-Fi network."""
if wifi_index == 2 and wifi_count == 4:
# In case of 4 Wi-Fi networks, the 2nd one is used for internal communication
# between mesh devices and should not be named like the others to avoid confusion
return None
if (wifi_index + 1) == wifi_count:
# Last Wi-Fi network in the guest network, both bands available
return "Guest"
# Cast to correct type for type checker
if (result := WIFI_BAND.get(wifi_index)) is not None:
return f"Main {result['band']}"
return None
async def _get_wifi_networks_list(avm_wrapper: AvmWrapper) -> dict[int, dict[str, Any]]:
"""Get a list of wifi networks with friendly names."""
wifi_count = len(
[
s
for s in avm_wrapper.connection.services
if s.startswith("WLANConfiguration")
]
)
_LOGGER.debug("WiFi networks count: %s", wifi_count)
networks: dict[int, dict[str, Any]] = {}
for i in range(1, wifi_count + 1):
network_info = await avm_wrapper.async_get_wlan_configuration(i)
if (switch_name := _wifi_naming(network_info, i - 1, wifi_count)) is None:
continue
networks[i] = network_info
networks[i]["switch_name"] = switch_name
_LOGGER.debug("WiFi networks list: %s", networks)
return networks
async def _migrate_to_new_unique_id(
hass: HomeAssistant, avm_wrapper: AvmWrapper
) -> None:
"""Migrate old unique ids to new unique ids."""
_LOGGER.debug("Migrating Wi-Fi switches")
entity_registry = er.async_get(hass)
networks = await _get_wifi_networks_list(avm_wrapper)
for index, network in networks.items():
description = f"Wi-Fi {network['NewSSID']}"
if (
len(
[
j
for j, n in networks.items()
if slugify(n["NewSSID"]) == slugify(network["NewSSID"])
]
)
> 1
):
description += f" ({WIFI_STANDARD[index]})"
old_unique_id = f"{avm_wrapper.unique_id}-{slugify(description)}"
new_unique_id = f"{avm_wrapper.unique_id}-wi_fi_{slugify(_wifi_naming(network, index - 1, len(networks)))}"
entity_id = entity_registry.async_get_entity_id(
Platform.SWITCH, DOMAIN, old_unique_id
)
if entity_id is not None:
entity_registry.async_update_entity(
entity_id,
new_unique_id=new_unique_id,
)
_LOGGER.debug(
"Migrating Wi-FI switch unique_id from [%s] to [%s]",
old_unique_id,
new_unique_id,
)
_LOGGER.debug("Migration completed")
async def _async_deflection_entities_list(
avm_wrapper: AvmWrapper, device_friendly_name: str
@@ -221,7 +125,35 @@ async def _async_wifi_entities_list(
#
# https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/wlanconfigSCPD.pdf
#
networks = await _get_wifi_networks_list(avm_wrapper)
wifi_count = len(
[
s
for s in avm_wrapper.connection.services
if s.startswith("WLANConfiguration")
]
)
_LOGGER.debug("WiFi networks count: %s", wifi_count)
networks: dict[int, dict[str, Any]] = {}
for i in range(1, wifi_count + 1):
network_info = await avm_wrapper.async_get_wlan_configuration(i)
# Devices with 4 WLAN services, use the 2nd for internal communications
if not (wifi_count == 4 and i == 2):
networks[i] = network_info
for i, network in networks.copy().items():
networks[i]["switch_name"] = network["NewSSID"]
if (
len(
[
j
for j, n in networks.items()
if slugify(n["NewSSID"]) == slugify(network["NewSSID"])
]
)
> 1
):
networks[i]["switch_name"] += f" ({WIFI_STANDARD[i]})"
_LOGGER.debug("WiFi networks list: %s", networks)
return [
FritzBoxWifiSwitch(avm_wrapper, device_friendly_name, index, data)
for index, data in networks.items()
@@ -293,8 +225,6 @@ async def async_setup_entry(
local_ip = await async_get_source_ip(avm_wrapper.hass, target_ip=avm_wrapper.host)
await _migrate_to_new_unique_id(hass, avm_wrapper)
entities_list = await async_all_entities_list(
avm_wrapper,
entry.title,
@@ -624,11 +554,8 @@ class FritzBoxWifiSwitch(FritzBoxBaseSwitch):
)
self._network_num = network_num
description = f"Wi-Fi {network_data['switch_name']}"
self._attr_translation_key = slugify(description)
switch_info = SwitchInfo(
description=description,
description=f"Wi-Fi {network_data['switch_name']}",
friendly_name=device_friendly_name,
icon="mdi:wifi",
type=SWITCH_TYPE_WIFINETWORK,

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/frontier_silicon",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["afsapi==0.3.1"],
"requirements": ["afsapi==0.2.7"],
"ssdp": [
{
"st": "urn:schemas-frontier-silicon-com:undok:fsapi:1"

View File

@@ -38,7 +38,6 @@ PLATFORMS: list[Platform] = [
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.TEXT,
Platform.VALVE,
]
LOGGER = logging.getLogger(__name__)

View File

@@ -1,12 +0,0 @@
{
"entity": {
"text": {
"contour_name": {
"default": "mdi:vector-polygon"
},
"position_name": {
"default": "mdi:map-marker-radius"
}
}
}
}

View File

@@ -4,13 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass, field
from gardena_bluetooth.const import (
AquaContourWatering,
DeviceConfiguration,
Sensor,
Spray,
Valve,
)
from gardena_bluetooth.const import DeviceConfiguration, Sensor, Spray, Valve
from gardena_bluetooth.parse import (
Characteristic,
CharacteristicInt,
@@ -64,18 +58,6 @@ DESCRIPTIONS = (
char=Valve.manual_watering_time,
device_class=NumberDeviceClass.DURATION,
),
GardenaBluetoothNumberEntityDescription(
key=AquaContourWatering.manual_watering_time.unique_id,
translation_key="manual_watering_time",
native_unit_of_measurement=UnitOfTime.SECONDS,
mode=NumberMode.BOX,
native_min_value=0.0,
native_max_value=24 * 60 * 60,
native_step=60,
entity_category=EntityCategory.CONFIG,
char=AquaContourWatering.manual_watering_time,
device_class=NumberDeviceClass.DURATION,
),
GardenaBluetoothNumberEntityDescription(
key=Valve.remaining_open_time.unique_id,
translation_key="remaining_open_time",
@@ -87,17 +69,6 @@ DESCRIPTIONS = (
char=Valve.remaining_open_time,
device_class=NumberDeviceClass.DURATION,
),
GardenaBluetoothNumberEntityDescription(
key=AquaContourWatering.remaining_watering_time.unique_id,
translation_key="remaining_watering_time",
native_unit_of_measurement=UnitOfTime.SECONDS,
native_min_value=0.0,
native_max_value=24 * 60 * 60,
native_step=60.0,
entity_category=EntityCategory.DIAGNOSTIC,
char=AquaContourWatering.remaining_watering_time,
device_class=NumberDeviceClass.DURATION,
),
GardenaBluetoothNumberEntityDescription(
key=DeviceConfiguration.rain_pause.unique_id,
translation_key="rain_pause",

View File

@@ -50,9 +50,6 @@
"remaining_open_time": {
"name": "Remaining open time"
},
"remaining_watering_time": {
"name": "Remaining watering time"
},
"seasonal_adjust": {
"name": "Seasonal adjust"
},
@@ -154,14 +151,6 @@
"state": {
"name": "[%key:common::state::open%]"
}
},
"text": {
"contour_name": {
"name": "Contour {number}"
},
"position_name": {
"name": "Position {number}"
}
}
}
}

View File

@@ -1,88 +0,0 @@
"""Support for text entities."""
from __future__ import annotations
from dataclasses import dataclass
from gardena_bluetooth.const import AquaContourContours, AquaContourPosition
from gardena_bluetooth.parse import CharacteristicNullString
from homeassistant.components.text import TextEntity, TextEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import GardenaBluetoothConfigEntry
from .entity import GardenaBluetoothDescriptorEntity
@dataclass(frozen=True, kw_only=True)
class GardenaBluetoothTextEntityDescription(TextEntityDescription):
"""Description of entity."""
char: CharacteristicNullString
@property
def context(self) -> set[str]:
"""Context needed for update coordinator."""
return {self.char.uuid}
DESCRIPTIONS = (
*(
GardenaBluetoothTextEntityDescription(
key=f"position_{i}_name",
translation_key="position_name",
translation_placeholders={"number": str(i)},
has_entity_name=True,
char=getattr(AquaContourPosition, f"position_name_{i}"),
native_max=20,
entity_category=EntityCategory.CONFIG,
)
for i in range(1, 6)
),
*(
GardenaBluetoothTextEntityDescription(
key=f"contour_{i}_name",
translation_key="contour_name",
translation_placeholders={"number": str(i)},
has_entity_name=True,
char=getattr(AquaContourContours, f"contour_name_{i}"),
native_max=20,
entity_category=EntityCategory.CONFIG,
)
for i in range(1, 6)
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: GardenaBluetoothConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up text based on a config entry."""
coordinator = entry.runtime_data
entities = [
GardenaBluetoothTextEntity(coordinator, description, description.context)
for description in DESCRIPTIONS
if description.char.unique_id in coordinator.characteristics
]
async_add_entities(entities)
class GardenaBluetoothTextEntity(GardenaBluetoothDescriptorEntity, TextEntity):
"""Representation of a text entity."""
entity_description: GardenaBluetoothTextEntityDescription
@property
def native_value(self) -> str | None:
"""Return the value reported by the text."""
char = self.entity_description.char
return self.coordinator.get_cached(char)
async def async_set_value(self, value: str) -> None:
"""Change the text."""
char = self.entity_description.char
await self.coordinator.write(char, value)

View File

@@ -111,7 +111,7 @@
"name": "Add event"
},
"create_event": {
"description": "Adds a new event to a Google calendar.",
"description": "Adds a new calendar event.",
"fields": {
"description": {
"description": "[%key:component::google::services::add_event::fields::description::description%]",
@@ -146,7 +146,7 @@
"name": "Summary"
}
},
"name": "Create event in Google Calendar"
"name": "Create event"
}
}
}

View File

@@ -94,7 +94,7 @@
"name": "Filename"
}
},
"name": "Upload media to Google Photos"
"name": "Upload media"
}
}
}

View File

@@ -11,8 +11,4 @@ CONF_LISTENING_PORT_DEFAULT = 4002
CONF_DISCOVERY_INTERVAL_DEFAULT = 60
SCAN_INTERVAL = timedelta(seconds=30)
# A device is considered unavailable if we have not heard a status response
# from it for three consecutive poll cycles. This tolerates a single dropped
# UDP response plus some jitter before flapping the entity state.
DEVICE_TIMEOUT = SCAN_INTERVAL * 3
DISCOVERY_TIMEOUT = 5

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from datetime import datetime
import logging
from typing import Any
@@ -23,7 +22,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEVICE_TIMEOUT, DOMAIN, MANUFACTURER
from .const import DOMAIN, MANUFACTURER
from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry
_LOGGER = logging.getLogger(__name__)
@@ -119,19 +118,6 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
serial_number=device.fingerprint,
)
@property
def available(self) -> bool:
"""Return if the device is reachable.
The underlying library updates ``lastseen`` whenever the device
replies to a status request. The coordinator polls every
``SCAN_INTERVAL``, so if we have not heard back within
``DEVICE_TIMEOUT`` we consider the device offline.
"""
if not super().available:
return False
return datetime.now() - self._device.lastseen < DEVICE_TIMEOUT
@property
def is_on(self) -> bool:
"""Return true if device is on (brightness above 0)."""
@@ -219,8 +205,8 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
@callback
def _update_callback(self, device: GoveeDevice) -> None:
"""Handle device state updates pushed by the library."""
self.async_write_ha_state()
if self.hass:
self.async_write_ha_state()
def _save_last_color_state(self) -> None:
color_mode = self.color_mode

View File

@@ -5,7 +5,8 @@ rules:
comment: The integration registers no actions.
appropriate-polling: done
brands: done
common-modules: done
common-modules:
status: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
@@ -46,7 +47,8 @@ rules:
test-coverage: done
# Gold
devices: done
devices:
status: done
diagnostics: todo
discovery-update-info:
status: exempt

View File

@@ -4,7 +4,8 @@ rules:
status: exempt
comment: |
This integration doesn't add actions.
appropriate-polling: done
appropriate-polling:
status: done
brands: done
common-modules: done
config-flow-test-coverage: done

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.94", "babel==2.15.0"]
"requirements": ["holidays==0.93", "babel==2.15.0"]
}

View File

@@ -77,12 +77,7 @@ AFFECTS_TO_SELECTED_PROGRAM = "selected_program"
TRANSLATION_KEYS_PROGRAMS_MAP = {
bsh_key_to_translation_key(program.value): program
for program in ProgramKey
if program
not in (
ProgramKey.UNKNOWN,
ProgramKey.BSH_COMMON_FAVORITE_001,
ProgramKey.BSH_COMMON_FAVORITE_002,
)
if program not in (ProgramKey.UNKNOWN, ProgramKey.BSH_COMMON_FAVORITE_001)
}
PROGRAMS_TRANSLATION_KEYS_MAP = {

View File

@@ -533,11 +533,7 @@ class HomeConnectApplianceCoordinator(DataUpdateCoordinator[HomeConnectAppliance
current_program_key = program.key
program_options = program.options
if (
current_program_key
in (
ProgramKey.BSH_COMMON_FAVORITE_001,
ProgramKey.BSH_COMMON_FAVORITE_002,
)
current_program_key == ProgramKey.BSH_COMMON_FAVORITE_001
and program_options
):
# The API doesn't allow to fetch the options from the favorite program.
@@ -620,11 +616,7 @@ class HomeConnectApplianceCoordinator(DataUpdateCoordinator[HomeConnectAppliance
options_to_notify = options.copy()
options.clear()
if (
program_key
in (
ProgramKey.BSH_COMMON_FAVORITE_001,
ProgramKey.BSH_COMMON_FAVORITE_002,
)
program_key == ProgramKey.BSH_COMMON_FAVORITE_001
and (event := events.get(EventKey.BSH_COMMON_OPTION_BASE_PROGRAM))
and isinstance(event.value, str)
):

View File

@@ -23,6 +23,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.34.0"],
"requirements": ["aiohomeconnect==0.33.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -436,11 +436,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
else None
)
if (
program_key
in (
ProgramKey.BSH_COMMON_FAVORITE_001,
ProgramKey.BSH_COMMON_FAVORITE_002,
)
program_key == ProgramKey.BSH_COMMON_FAVORITE_001
and (
base_program_event := self.appliance.events.get(
EventKey.BSH_COMMON_OPTION_BASE_PROGRAM

View File

@@ -57,7 +57,6 @@ BSH_PROGRAM_SENSORS = (
"CookProcessor",
"Dishwasher",
"Dryer",
"Microwave",
"Hood",
"Oven",
"Washer",
@@ -199,7 +198,7 @@ EVENT_SENSORS = (
options=EVENT_OPTIONS,
default_value="off",
translation_key="program_aborted",
appliance_types=("Dishwasher", "Microwave", "CleaningRobot", "CookProcessor"),
appliance_types=("Dishwasher", "CleaningRobot", "CookProcessor"),
),
HomeConnectSensorEntityDescription(
key=EventKey.BSH_COMMON_EVENT_PROGRAM_FINISHED,
@@ -212,7 +211,6 @@ EVENT_SENSORS = (
"Dishwasher",
"Washer",
"Dryer",
"Microwave",
"WasherDryer",
"CleaningRobot",
"CookProcessor",
@@ -601,6 +599,8 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
class HomeConnectProgramSensor(HomeConnectSensor):
"""Sensor class for Home Connect sensors that reports information related to the running program."""
program_running: bool = False
async def async_added_to_hass(self) -> None:
"""Register listener."""
await super().async_added_to_hass()
@@ -614,20 +614,17 @@ class HomeConnectProgramSensor(HomeConnectSensor):
@callback
def _handle_operation_state_event(self) -> None:
"""Update status when an event for the entity is received."""
if not self.program_running:
# reset the value when the program is not running, paused or finished
self._attr_native_value = None
self.async_write_ha_state()
@property
def program_running(self) -> bool:
"""Return whether a program is running, paused or finished."""
status = self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
return status is not None and status.value in [
self.program_running = (
status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
) is not None and status.value in [
BSH_OPERATION_STATE_RUN,
BSH_OPERATION_STATE_PAUSE,
BSH_OPERATION_STATE_FINISHED,
]
if not self.program_running:
# reset the value when the program is not running, paused or finished
self._attr_native_value = None
self.async_write_ha_state()
@property
def available(self) -> bool:
@@ -638,6 +635,13 @@ class HomeConnectProgramSensor(HomeConnectSensor):
def update_native_value(self) -> None:
"""Update the program sensor's status."""
self.program_running = (
status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
) is not None and status.value in [
BSH_OPERATION_STATE_RUN,
BSH_OPERATION_STATE_PAUSE,
BSH_OPERATION_STATE_FINISHED,
]
event = self.appliance.events.get(cast(EventKey, self.bsh_key))
if event:
self._update_native_value(event.value)

View File

@@ -148,7 +148,7 @@
},
"step": {
"init": {
"description": "The integration `{domain}` could not be found. This happens when a (community) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
"description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
"menu_options": {
"confirm": "Remove previous configurations",
"ignore": "Ignore"
@@ -236,7 +236,7 @@
"description": "Restarts Home Assistant.",
"fields": {
"safe_mode": {
"description": "Disable community integrations and community cards.",
"description": "Disable custom integrations and custom cards.",
"name": "Safe mode"
}
},

View File

@@ -172,8 +172,6 @@ async def async_migrate_entry(
f"USB device {device} is missing, cannot migrate"
)
assert isinstance(usb_info, USBDevice)
hass.config_entries.async_update_entry(
config_entry,
data={

View File

@@ -81,4 +81,5 @@ rules:
inject-websession:
status: exempt
comment: Underlying huawei-lte-api does not use aiohttp or httpx, so this does not apply.
strict-typing: done
strict-typing:
status: done

View File

@@ -10,6 +10,6 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["aiohue"],
"requirements": ["aiohue==4.8.1"],
"requirements": ["aiohue==4.8.0"],
"zeroconf": ["_hue._tcp.local."]
}

View File

@@ -184,8 +184,10 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
)
def _should_poll(self) -> bool:
"""Return True if at least one mower is not OFF."""
return any(mower.mower.state != MowerStates.OFF for mower in self.data.values())
"""Return True if at least one mower is connected and at least one is not OFF."""
return any(mower.metadata.connected for mower in self.data.values()) and any(
mower.mower.state != MowerStates.OFF for mower in self.data.values()
)
async def _pong_watchdog(self) -> None:
"""Watchdog to check for pong messages."""

View File

@@ -47,11 +47,11 @@ rules:
discovery: todo
discovery-update-info: todo
docs-data-update: done
docs-examples: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt

View File

@@ -12,5 +12,5 @@
"iot_class": "local_polling",
"loggers": ["incomfortclient"],
"quality_scale": "platinum",
"requirements": ["incomfort-client==0.7.0"]
"requirements": ["incomfort-client==0.6.12"]
}

View File

@@ -92,13 +92,11 @@
"central_heating": "Central heating",
"central_heating_low": "Central heating low",
"central_heating_rf": "Central heating rf",
"central_heating_wait": "Central heating waiting",
"cv_temperature_too_high_e1": "Temperature too high",
"flame_detection_fault_e6": "Flame detection fault",
"frost": "Frost protection",
"gas_valve_relay_faulty_e29": "Gas valve relay faulty",
"gas_valve_relay_faulty_e30": "[%key:component::incomfort::entity::water_heater::boiler::state::gas_valve_relay_faulty_e29%]",
"hp_error_recovery": "Heat pump error recovery",
"incorrect_fan_speed_e8": "Incorrect fan speed",
"no_flame_signal_e4": "No flame signal",
"off": "[%key:common::state::off%]",
@@ -122,7 +120,6 @@
"service": "Service",
"shortcut_outside_sensor_temperature_e27": "Shortcut outside temperature sensor",
"standby": "[%key:common::state::standby%]",
"starting_ch": "Starting central heating",
"tapwater": "Tap water",
"tapwater_int": "Tap water internal",
"unknown": "Unknown"

View File

@@ -52,13 +52,20 @@ rules:
discovery:
status: exempt
comment: Integration does not support network discovery
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
docs-data-update:
status: todo
docs-examples:
status: todo
docs-known-limitations:
status: todo
docs-supported-devices:
status: todo
docs-supported-functions:
status: todo
docs-troubleshooting:
status: todo
docs-use-cases:
status: todo
dynamic-devices:
status: exempt
comment: Integration represents a single device, not a hub with multiple devices
@@ -66,8 +73,10 @@ rules:
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
exception-translations:
status: todo
icon-translations:
status: todo
reconfiguration-flow: done
repair-issues:
status: exempt
@@ -79,4 +88,5 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
strict-typing:
status: todo

View File

@@ -1,7 +1,5 @@
"""Provide info to system health."""
from typing import Any
from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
@@ -16,7 +14,7 @@ def async_register(
register.async_register_info(system_health_info)
async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
async def system_health_info(hass):
"""Get info for the info page."""
return {
"api_endpoint_reachable": system_health.async_check_can_reach_url(

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["jvcprojector"],
"requirements": ["pyjvcprojector==2.0.5"]
"requirements": ["pyjvcprojector==2.0.3"]
}

View File

@@ -123,9 +123,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
knx_module.ui_time_server_controller.start(
knx_module.xknx, knx_module.config_store.get_time_server_config()
)
knx_module.ui_expose_controller.start(
hass, knx_module.xknx, knx_module.config_store.get_exposes()
)
if CONF_KNX_EXPOSE in config:
knx_module.yaml_exposures.extend(
create_combined_knx_exposure(hass, knx_module.xknx, config[CONF_KNX_EXPOSE])
@@ -160,7 +157,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
for exposure in knx_module.service_exposures.values():
exposure.async_remove()
knx_module.ui_time_server_controller.stop()
knx_module.ui_expose_controller.stop()
configured_platforms_yaml = {
platform

View File

@@ -58,7 +58,6 @@ from .expose import KnxExposeEntity, KnxExposeTime
from .project import KNXProject
from .repairs import data_secure_group_key_issue_dispatcher
from .storage.config_store import KNXConfigStore
from .storage.expose_controller import ExposeController
from .storage.time_server import TimeServerController
from .telegrams import Telegrams
@@ -77,7 +76,6 @@ class KNXModule:
self.connected = False
self.yaml_exposures: list[KnxExposeEntity | KnxExposeTime] = []
self.service_exposures: dict[str, KnxExposeEntity | KnxExposeTime] = {}
self.ui_expose_controller = ExposeController()
self.ui_time_server_controller = TimeServerController()
self.entry = entry

View File

@@ -11,16 +11,15 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.storage import Store
from homeassistant.util.ulid import ulid_now
from ..const import DOMAIN, KNX_MODULE_KEY
from ..const import DOMAIN
from . import migration
from .const import CONF_DATA
from .expose_controller import KNXExposeStoreModel, KNXExposeStoreOptionModel
from .time_server import KNXTimeServerStoreModel
_LOGGER = logging.getLogger(__name__)
STORAGE_VERSION: Final = 2
STORAGE_VERSION_MINOR: Final = 4
STORAGE_VERSION_MINOR: Final = 3
STORAGE_KEY: Final = f"{DOMAIN}/config_store.json"
type KNXPlatformStoreModel = dict[str, dict[str, Any]] # unique_id: configuration
@@ -33,7 +32,6 @@ class KNXConfigStoreModel(TypedDict):
"""Represent KNX configuration store data."""
entities: KNXEntityStoreModel
expose: KNXExposeStoreModel
time_server: KNXTimeServerStoreModel
@@ -70,10 +68,6 @@ class _KNXConfigStoreStorage(Store[KNXConfigStoreModel]):
# version 2.3 introduced in 2026.3
migration.migrate_2_2_to_2_3(old_data)
if old_major_version <= 2 and old_minor_version < 4:
# version 2.4 introduced in 2026.5
migration.migrate_2_3_to_2_4(old_data)
return old_data
@@ -93,7 +87,6 @@ class KNXConfigStore:
)
self.data = KNXConfigStoreModel( # initialize with default structure
entities={},
expose={},
time_server={},
)
self._platform_controllers: dict[Platform, PlatformControllerBase] = {}
@@ -106,10 +99,6 @@ class KNXConfigStore:
"Loaded KNX config data from storage. %s entity platforms",
len(self.data["entities"]),
)
_LOGGER.debug(
"Loaded KNX config data from storage. %s exposes",
len(self.data["expose"]),
)
def add_platform(
self, platform: Platform, controller: PlatformControllerBase
@@ -194,48 +183,6 @@ class KNXConfigStore:
if registry_entry.unique_id in unique_ids
]
def get_exposes(self) -> KNXExposeStoreModel:
"""Return KNX entity state expose configuration."""
return self.data["expose"]
def get_expose_groups(self) -> dict[str, list[str]]:
"""Return KNX entity state exposes and their group addresses."""
return {
entity_id: [option["ga"]["write"] for option in config]
for entity_id, config in self.data["expose"].items()
}
def get_expose_config(self, entity_id: str) -> list[KNXExposeStoreOptionModel]:
"""Return KNX entity state expose configuration for an entity."""
return self.data["expose"].get(entity_id, [])
async def update_expose(
self, entity_id: str, expose_config: list[KNXExposeStoreOptionModel]
) -> None:
"""Update KNX expose configuration for an entity."""
knx_module = self.hass.data[KNX_MODULE_KEY]
expose_controller = knx_module.ui_expose_controller
expose_controller.update_entity_expose(
self.hass, knx_module.xknx, entity_id, expose_config
)
self.data["expose"][entity_id] = expose_config
await self._store.async_save(self.data)
async def delete_expose(self, entity_id: str) -> None:
"""Delete KNX expose configuration for an entity."""
knx_module = self.hass.data[KNX_MODULE_KEY]
expose_controller = knx_module.ui_expose_controller
expose_controller.remove_entity_expose(entity_id)
try:
del self.data["expose"][entity_id]
except KeyError as err:
raise ConfigStoreException(
f"Entity not found in expose configuration: {entity_id}"
) from err
await self._store.async_save(self.data)
@callback
def get_time_server_config(self) -> KNXTimeServerStoreModel:
"""Return KNX time server configuration."""
@@ -244,7 +191,7 @@ class KNXConfigStore:
async def update_time_server_config(self, config: KNXTimeServerStoreModel) -> None:
"""Update time server configuration."""
self.data["time_server"] = config
knx_module = self.hass.data[KNX_MODULE_KEY]
knx_module = self.hass.data.get(DOMAIN)
if knx_module:
knx_module.ui_time_server_controller.start(knx_module.xknx, config)
await self._store.async_save(self.data)

View File

@@ -1,154 +0,0 @@
"""KNX configuration storage for entity state exposes."""
from typing import Any, NotRequired, TypedDict
import voluptuous as vol
from xknx import XKNX
from xknx.dpt import DPTBase
from xknx.telegram.address import parse_device_group_address
from homeassistant.const import CONF_ENTITY_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
config_validation as cv,
selector,
template as template_helper,
)
from ..expose import KnxExposeEntity, KnxExposeOptions
from .entity_store_validation import validate_config_store_data
from .knx_selector import GASelector
type KNXExposeStoreModel = dict[
str, list[KNXExposeStoreOptionModel] # entity_id: configuration
]
class KNXExposeStoreOptionModel(TypedDict):
"""Represent KNX entity state expose configuration for an entity."""
ga: dict[str, Any] # group address configuration with write and dpt
attribute: NotRequired[str]
cooldown: NotRequired[float]
default: NotRequired[Any]
periodic_send: NotRequired[float]
respond_to_read: NotRequired[bool]
value_template: NotRequired[str]
class KNXExposeDataModel(TypedDict):
    """Represent a loaded KNX expose config for validation."""
    # Home Assistant entity whose state/attribute is exposed to KNX.
    entity_id: str
    # One expose configuration per KNX group address for this entity.
    options: list[KNXExposeStoreOptionModel]
def validate_expose_template_no_coerce(value: str) -> str:
    """Check that *value* is a valid, dynamic template and return the string.

    The original string is returned (not a Template object) so it can be
    stored and converted into a Template later.
    """
    # cv.template raises vol.Invalid on syntax errors; the parsed result is
    # only needed to determine whether the template is dynamic.
    parsed = cv.template(value)
    if not parsed.is_static:
        return value
    raise vol.Invalid(
        "Static templates are not supported. Template should start with '{{' and end with '}}'"
    )
# Schema for a single expose option: one KNX group address (write address
# plus DPT, no state/passive addresses) and optional behavior settings.
EXPOSE_OPTION_SCHEMA = vol.Schema(
    {
        vol.Required("ga"): GASelector(
            state=False,
            passive=False,
            write_required=True,
            dpt=["numeric", "enum", "complex", "string"],
        ),
        vol.Optional("attribute"): str,
        vol.Optional("default"): object,
        vol.Optional("cooldown"): cv.positive_float, # frontend renders to duration
        vol.Optional("periodic_send"): cv.positive_float,
        vol.Optional("respond_to_read"): bool,
        vol.Optional("value_template"): validate_expose_template_no_coerce,
    }
)
# Schema for a full expose payload: the target entity and its list of
# expose options. Unknown keys are silently dropped (REMOVE_EXTRA).
EXPOSE_CONFIG_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_ENTITY_ID): selector.EntitySelector(),
        vol.Required("options"): [EXPOSE_OPTION_SCHEMA],
    },
    extra=vol.REMOVE_EXTRA,
)
def validate_expose_data(data: dict) -> KNXExposeDataModel:
    """Validate and convert expose configuration data."""
    # Delegates to the shared store validator with the expose schema;
    # raises EntityStoreValidationException on invalid data (see callers).
    return validate_config_store_data(EXPOSE_CONFIG_SCHEMA, data) # type: ignore[return-value]
def _store_to_expose_option(
    hass: HomeAssistant, config: KNXExposeStoreOptionModel
) -> KnxExposeOptions:
    """Build KnxExposeOptions from a stored expose option model."""
    # Create a Template object only when a template string was stored.
    template = None
    template_str = config.get("value_template")
    if template_str is not None:
        template = template_helper.Template(template_str, hass)
    transcoder: type[DPTBase] = DPTBase.parse_transcoder(config["ga"]["dpt"])  # type: ignore[assignment]
    return KnxExposeOptions(
        group_address=parse_device_group_address(config["ga"]["write"]),
        dpt=transcoder,
        attribute=config.get("attribute"),
        cooldown=config.get("cooldown", 0),
        default=config.get("default"),
        periodic_send=config.get("periodic_send", 0),
        respond_to_read=config.get("respond_to_read", True),
        value_template=template,
    )
class ExposeController:
    """Controller class for UI entity exposures."""

    def __init__(self) -> None:
        """Initialize entity expose controller."""
        # Active exposes keyed by entity_id.
        self._entity_exposes: dict[str, KnxExposeEntity] = {}

    @callback
    def stop(self) -> None:
        """Shutdown entity expose controller."""
        # Unregister every active expose, then drop all references.
        for active in list(self._entity_exposes.values()):
            active.async_remove()
        self._entity_exposes.clear()

    @callback
    def start(
        self, hass: HomeAssistant, xknx: XKNX, config: KNXExposeStoreModel
    ) -> None:
        """Update entity expose configuration."""
        # Restart from a clean state before applying the new configuration.
        if self._entity_exposes:
            self.stop()
        for entity_id, entity_options in config.items():
            self.update_entity_expose(hass, xknx, entity_id, entity_options)

    @callback
    def update_entity_expose(
        self,
        hass: HomeAssistant,
        xknx: XKNX,
        entity_id: str,
        expose_config: list[KNXExposeStoreOptionModel],
    ) -> None:
        """Update entity expose configuration for an entity."""
        # Replace any existing expose for this entity with a fresh one.
        self.remove_entity_expose(entity_id)
        new_expose = KnxExposeEntity(
            hass,
            xknx,
            entity_id,
            [_store_to_expose_option(hass, option) for option in expose_config],
        )
        self._entity_exposes[entity_id] = new_expose
        new_expose.async_register()

    @callback
    def remove_entity_expose(self, entity_id: str) -> None:
        """Remove entity expose configuration for an entity."""
        existing = self._entity_exposes.pop(entity_id, None)
        if existing is not None:
            existing.async_remove()

View File

@@ -55,8 +55,3 @@ def migrate_2_1_to_2_2(data: dict[str, Any]) -> None:
def migrate_2_2_to_2_3(data: dict[str, Any]) -> None:
    """Migrate from schema 2.2 to schema 2.3.

    Adds an empty "time_server" section if missing; existing data is kept.
    """
    if "time_server" not in data:
        data["time_server"] = {}
def migrate_2_3_to_2_4(data: dict[str, Any]) -> None:
    """Migrate from schema 2.3 to schema 2.4.

    Adds an empty "expose" section if missing; existing data is kept.
    """
    if "expose" not in data:
        data["expose"] = {}

View File

@@ -950,48 +950,6 @@
"description": "Add and manage KNX entities",
"title": "Entities"
},
"expose": {
"create": {
"add_expose": "Add expose",
"attribute": {
"description": "Expose changes of a specific attribute of the entity instead of the state. Optional. If the attribute is not set, the entity state is exposed."
},
"cooldown": {
"description": "Minimum time between consecutive sends. This can be used to prevent high traffic on the KNX bus when values change very frequently. Only the most recent value during the cooldown period is sent.",
"label": "Cooldown"
},
"default": {
"description": "The value to send if the entity state is `unavailable` or `unknown`, or if the attribute is not set. If `default` is omitted, nothing is sent in these cases, but the last known value remains available for read requests.",
"label": "Default value"
},
"entity": {
"description": "Home Assistant entity to expose state changes to the KNX bus.",
"label": "Entity"
},
"ga": {
"label": "Group address"
},
"periodic_send": {
"description": "Time interval to automatically resend the current value to the KNX bus, even if it hasn't changed.",
"label": "Periodic send interval"
},
"respond_to_read": {
"description": "[%key:component::knx::config_panel::entities::create::_::knx::respond_to_read::description%]",
"label": "[%key:component::knx::config_panel::entities::create::_::knx::respond_to_read::label%]"
},
"section_advanced_options": {
"title": "Advanced options"
},
"show_raw_values": "Show raw values",
"title": "Add exposure",
"value_template": {
"description": "Optionally transform the entity state or attribute value before sending it to KNX using a template. The template receives the entity state or attribute value as `value` variable.",
"label": "Value template"
}
},
"description": "Expose Home Assistant entity states to the KNX bus",
"title": "Expose"
},
"group_monitor": {
"description": "Monitor KNX group communication",
"title": "Group monitor"

View File

@@ -35,7 +35,6 @@ from .storage.entity_store_validation import (
EntityStoreValidationSuccess,
validate_entity_data,
)
from .storage.expose_controller import validate_expose_data
from .storage.serialize import get_serialized_schema
from .storage.time_server import validate_time_server_data
from .telegrams import (
@@ -69,11 +68,6 @@ async def register_panel(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, ws_get_schema)
websocket_api.async_register_command(hass, ws_get_time_server_config)
websocket_api.async_register_command(hass, ws_update_time_server_config)
websocket_api.async_register_command(hass, ws_get_expose_groups)
websocket_api.async_register_command(hass, ws_get_expose_config)
websocket_api.async_register_command(hass, ws_update_expose)
websocket_api.async_register_command(hass, ws_delete_expose)
websocket_api.async_register_command(hass, ws_validate_expose)
if DOMAIN not in hass.data.get("frontend_panels", {}):
await hass.http.async_register_static_paths(
@@ -594,142 +588,6 @@ def ws_create_device(
connection.send_result(msg["id"], _device.dict_repr)
########
# Expose
########
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "knx/get_expose_groups",
    }
)
@provide_knx
@callback
def ws_get_expose_groups(
    hass: HomeAssistant,
    knx: KNXModule,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Get exposes from config store.

    Sends the result of config_store.get_expose_groups() back to the caller.
    """
    connection.send_result(msg["id"], knx.config_store.get_expose_groups())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "knx/get_expose_config",
        vol.Required("entity_id"): str,
    }
)
@provide_knx
@callback
def ws_get_expose_config(
    hass: HomeAssistant,
    knx: KNXModule,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Get expose configuration from config store.

    Returns the stored expose configuration for the requested entity_id.
    """
    connection.send_result(
        msg["id"], knx.config_store.get_expose_config(msg["entity_id"])
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "knx/update_expose",
        vol.Required("entity_id"): str,
        vol.Required("options"): list, # validation done in handler
    }
)
@websocket_api.async_response
@provide_knx
async def ws_update_expose(
    hass: HomeAssistant,
    knx: KNXModule,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Update expose configuration in config store."""
    try:
        data = validate_expose_data(msg)
    except EntityStoreValidationException as validation_exc:
        # Schema errors are reported as a result payload, not a WS error.
        connection.send_result(msg["id"], validation_exc.validation_error)
        return
    try:
        await knx.config_store.update_expose(data["entity_id"], data["options"])
    except ConfigStoreException as store_err:
        connection.send_error(
            msg["id"], websocket_api.const.ERR_HOME_ASSISTANT_ERROR, str(store_err)
        )
    else:
        connection.send_result(
            msg["id"], EntityStoreValidationSuccess(success=True, entity_id=None)
        )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "knx/delete_expose",
        vol.Required("entity_id"): str,
    }
)
@websocket_api.async_response
@provide_knx
async def ws_delete_expose(
    hass: HomeAssistant,
    knx: KNXModule,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Delete expose configuration from config store."""
    try:
        await knx.config_store.delete_expose(msg["entity_id"])
    except ConfigStoreException as store_err:
        # E.g. the entity has no stored expose configuration.
        connection.send_error(
            msg["id"], websocket_api.const.ERR_HOME_ASSISTANT_ERROR, str(store_err)
        )
    else:
        connection.send_result(msg["id"])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "knx/validate_expose",
        vol.Required("entity_id"): str,
        vol.Required("options"): list, # validation done in handler
    }
)
@callback
def ws_validate_expose(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Validate expose data."""
    try:
        validate_expose_data(msg)
    except EntityStoreValidationException as validation_exc:
        # Validation errors are sent as a structured result payload.
        connection.send_result(msg["id"], validation_exc.validation_error)
    else:
        connection.send_result(
            msg["id"], EntityStoreValidationSuccess(success=True, entity_id=None)
        )
#############
# Time server
#############
@websocket_api.require_admin
@websocket_api.websocket_command(
{

View File

@@ -20,7 +20,7 @@
"name": "[%key:common::config_flow::data::name%]"
}
},
"name": "Log activity"
"name": "Log"
}
},
"title": "Activity"

View File

@@ -1,6 +1,5 @@
"""The Lunatone integration."""
import logging
from typing import Final
from lunatone_rest_api_client import Auth, DALIBroadcast, Devices, Info
@@ -8,10 +7,9 @@ from lunatone_rest_api_client import Auth, DALIBroadcast, Devices, Info
from homeassistant.const import CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .config_flow import LunatoneConfigFlow
from .const import DOMAIN, MANUFACTURER
from .coordinator import (
LunatoneConfigEntry,
@@ -20,51 +18,9 @@ from .coordinator import (
LunatoneInfoDataUpdateCoordinator,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS: Final[list[Platform]] = [Platform.LIGHT]
async def _update_unique_id(
    hass: HomeAssistant, entry: LunatoneConfigEntry, new_unique_id: str
) -> None:
    """Migrate the config entry and its entities/devices to a new unique ID.

    Unique IDs have the form "<interface-id>-<suffix>"; only the part before
    the first "-" is replaced, the suffix is preserved.
    """
    _LOGGER.debug("Update unique ID")
    # Update all associated entities
    entity_registry = er.async_get(hass)
    entities = er.async_entries_for_config_entry(entity_registry, entry.entry_id)
    for entity in entities:
        # partition("-") keeps the separator, so "".join(parts) restores
        # "<new_id>-<suffix>" (or just "<new_id>" when no "-" is present).
        parts = list(entity.unique_id.partition("-"))
        parts[0] = new_unique_id
        entity_registry.async_update_entity(
            entity.entity_id, new_unique_id="".join(parts)
        )
    # Update all associated devices
    device_registry = dr.async_get(hass)
    devices = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
    for device in devices:
        # NOTE(review): assumes each device has exactly one identifier, and
        # set.pop() mutates the registry entry's identifier set in place —
        # confirm this is intended before relying on it.
        identifier = device.identifiers.pop()
        parts = list(identifier[1].partition("-"))
        parts[0] = new_unique_id
        device_registry.async_update_device(
            device.id, new_identifiers={(identifier[0], "".join(parts))}
        )
    # Update the config entry itself
    hass.config_entries.async_update_entry(
        entry,
        unique_id=new_unique_id,
        minor_version=LunatoneConfigFlow.MINOR_VERSION,
        version=LunatoneConfigFlow.VERSION,
    )
    _LOGGER.debug("Update of unique ID successful")
async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) -> bool:
"""Set up Lunatone from a config entry."""
auth_api = Auth(async_get_clientsession(hass), entry.data[CONF_URL])
@@ -74,22 +30,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) ->
coordinator_info = LunatoneInfoDataUpdateCoordinator(hass, entry, info_api)
await coordinator_info.async_config_entry_first_refresh()
if info_api.data is None or info_api.serial_number is None:
if info_api.serial_number is None:
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="missing_device_info"
)
if info_api.uid is not None:
new_unique_id = info_api.uid.replace("-", "")
if new_unique_id != entry.unique_id:
await _update_unique_id(hass, entry, new_unique_id)
assert entry.unique_id
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, entry.unique_id)},
identifiers={(DOMAIN, str(info_api.serial_number))},
name=info_api.name,
manufacturer=MANUFACTURER,
sw_version=info_api.version,

View File

@@ -52,17 +52,14 @@ class LunatoneConfigFlow(ConfigFlow, domain=DOMAIN):
if info_api.serial_number is None:
errors["base"] = "missing_device_info"
else:
unique_id = str(info_api.serial_number)
if info_api.uid is not None:
unique_id = info_api.uid.replace("-", "")
await self.async_set_unique_id(unique_id)
await self.async_set_unique_id(str(info_api.serial_number))
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(), data_updates=data, title=url
)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=url, data=data)
return self.async_create_entry(title=url, data={CONF_URL: url})
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,

View File

@@ -41,20 +41,17 @@ async def async_setup_entry(
coordinator_devices = config_entry.runtime_data.coordinator_devices
dali_line_broadcasts = config_entry.runtime_data.dali_line_broadcasts
assert config_entry.unique_id is not None
entities: list[LightEntity] = [
LunatoneLineBroadcastLight(
coordinator_info,
coordinator_devices,
dali_line_broadcast,
config_entry.unique_id,
coordinator_info, coordinator_devices, dali_line_broadcast
)
for dali_line_broadcast in dali_line_broadcasts
]
entities.extend(
[
LunatoneLight(coordinator_devices, device_id, config_entry.unique_id)
LunatoneLight(
coordinator_devices, device_id, coordinator_info.data.device.serial
)
for device_id in coordinator_devices.data
]
)
@@ -79,14 +76,14 @@ class LunatoneLight(
self,
coordinator: LunatoneDevicesDataUpdateCoordinator,
device_id: int,
config_entry_unique_id: str,
interface_serial_number: int,
) -> None:
"""Initialize a Lunatone light."""
super().__init__(coordinator)
self._device_id = device_id
self._config_entry_unique_id = config_entry_unique_id
self._device = self.coordinator.data[device_id]
self._attr_unique_id = f"{config_entry_unique_id}-device{device_id}"
self._interface_serial_number = interface_serial_number
self._device = self.coordinator.data[self._device_id]
self._attr_unique_id = f"{interface_serial_number}-device{device_id}"
@property
def device_info(self) -> DeviceInfo:
@@ -97,7 +94,7 @@ class LunatoneLight(
name=self._device.name,
via_device=(
DOMAIN,
f"{self._config_entry_unique_id}-line{self._device.data.line}",
f"{self._interface_serial_number}-line{self._device.data.line}",
),
)
@@ -182,7 +179,6 @@ class LunatoneLineBroadcastLight(
coordinator_info: LunatoneInfoDataUpdateCoordinator,
coordinator_devices: LunatoneDevicesDataUpdateCoordinator,
broadcast: DALIBroadcast,
config_entry_unique_id: str,
) -> None:
"""Initialize a Lunatone line broadcast light."""
super().__init__(coordinator_info)
@@ -191,7 +187,7 @@ class LunatoneLineBroadcastLight(
line = broadcast.line
self._attr_unique_id = f"{config_entry_unique_id}-line{line}"
self._attr_unique_id = f"{coordinator_info.data.device.serial}-line{line}"
line_device = self.coordinator.data.lines[str(line)].device
extra_info: dict = {}
@@ -206,7 +202,7 @@ class LunatoneLineBroadcastLight(
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self.unique_id)},
name=f"DALI Line {line}",
via_device=(DOMAIN, config_entry_unique_id),
via_device=(DOMAIN, str(coordinator_info.data.device.serial)),
**extra_info,
)

View File

@@ -2,8 +2,7 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "Please ensure you reconfigure against the same device."
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",

View File

@@ -4,20 +4,11 @@ from dataclasses import dataclass
import logging
from typing import Any, cast
from pylutron import (
Button,
Keypad,
Led,
Lutron,
LutronException,
OccupancyGroup,
Output,
)
from pylutron import Button, Keypad, Led, Lutron, OccupancyGroup, Output
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN
@@ -66,12 +57,8 @@ async def async_setup_entry(
pwd = config_entry.data[CONF_PASSWORD]
lutron_client = Lutron(host, uid, pwd)
try:
await hass.async_add_executor_job(lutron_client.load_xml_db)
lutron_client.connect()
except LutronException as ex:
raise ConfigEntryNotReady(f"Failed to connect to Lutron repeater: {ex}") from ex
await hass.async_add_executor_job(lutron_client.load_xml_db)
lutron_client.connect()
_LOGGER.debug("Connected to main repeater at %s", host)
entity_registry = er.async_get(hass)

View File

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pylutron"],
"requirements": ["pylutron==0.4.1"],
"requirements": ["pylutron==0.4.0"],
"single_config_entry": true
}

Some files were not shown because too many files have changed in this diff Show More