Compare commits


1 Commit

Author | SHA1 | Message | Date
abmantis | 8bd5eed295 | Cache flattened service descriptions in websocket api | 2025-11-29 00:10:38 +00:00
41 changed files with 175 additions and 121 deletions

View File

@@ -7,7 +7,6 @@ from typing import Any, Final
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
EVENT_CORE_CONFIG_UPDATE,
EVENT_LABS_UPDATED,
EVENT_LOVELACE_UPDATED,
EVENT_PANELS_UPDATED,
EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
@@ -46,7 +45,6 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
EVENT_LABEL_REGISTRY_UPDATED,
EVENT_LABS_UPDATED,
EVENT_CATEGORY_REGISTRY_UPDATED,
EVENT_FLOOR_REGISTRY_UPDATED,
}

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from aiohttp import CookieJar
from pyanglianwater import AnglianWater
from pyanglianwater.auth import MSOB2CAuth
from pyanglianwater.exceptions import (
@@ -19,7 +18,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
@@ -34,10 +33,7 @@ async def async_setup_entry(
auth = MSOB2CAuth(
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=async_create_clientsession(
hass,
cookie_jar=CookieJar(quote_cookie=False),
),
session=async_get_clientsession(hass),
refresh_token=entry.data[CONF_ACCESS_TOKEN],
account_number=entry.data[CONF_ACCOUNT_NUMBER],
)
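
Not part of the diff: a minimal sketch of the two session styles this hunk switches between, assuming only the two helpers shown above. async_get_clientsession(hass) returns Home Assistant's shared aiohttp session, while async_create_clientsession(hass, ...) builds a dedicated one, here with cookie quoting disabled through a custom CookieJar. The helper name below is hypothetical.

from aiohttp import ClientSession, CookieJar

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import (
    async_create_clientsession,
    async_get_clientsession,
)


def pick_session(hass: HomeAssistant, needs_unquoted_cookies: bool) -> ClientSession:
    """Hypothetical helper mirroring the two calls in this hunk."""
    if needs_unquoted_cookies:
        # Dedicated session owned by the caller; cookies are not percent-quoted.
        return async_create_clientsession(
            hass, cookie_jar=CookieJar(quote_cookie=False)
        )
    # Shared session reused across integrations; never close it manually.
    return async_get_clientsession(hass)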

View File

@@ -421,8 +421,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
)
if short_form.search(model_alias):
model_alias += "-0"
if model_alias.endswith(("haiku", "opus", "sonnet")):
model_alias += "-latest"
model_options.append(
SelectOptionDict(
label=model_info.display_name,
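
Not part of the diff: a framework-free sketch of the alias normalization on the longer (8-line) side of this hunk, where a short-form alias gets a "-0" suffix and family aliases ending in haiku/opus/sonnet get "-latest". The regular expression below is an assumption for illustration only, not the integration's actual short_form pattern.

import re

# Hypothetical stand-in for the integration's short_form pattern.
short_form = re.compile(r"-\d$")


def normalize_alias(model_alias: str) -> str:
    """Apply both suffix rules shown in the hunk (sketch only)."""
    if short_form.search(model_alias):
        model_alias += "-0"
    if model_alias.endswith(("haiku", "opus", "sonnet")):
        model_alias += "-latest"
    return model_alias


assert normalize_alias("claude-3-7-sonnet") == "claude-3-7-sonnet-latest"
assert normalize_alias("claude-3-5-haiku") == "claude-3-5-haiku-latest"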

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.75.0"]
"requirements": ["anthropic==0.73.0"]
}

View File

@@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
DEFAULT_PORT: Final = 6053
STABLE_BLE_VERSION_STR = "2025.11.0"
STABLE_BLE_VERSION_STR = "2025.8.0"
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
PROJECT_URLS = {
"esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",

View File

@@ -157,7 +157,7 @@
"title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]"
},
"ble_firmware_outdated": {
"description": "ESPHome {version} introduces ultra-low latency event processing, reducing BLE event delays from 0-16 milliseconds to approximately 12 microseconds. This resolves stability issues when pairing, connecting, or handshaking with devices that require low latency, and makes Bluetooth proxy operations rival or exceed local adapters. We highly recommend updating {name} to take advantage of these improvements.",
"description": "To improve Bluetooth reliability and performance, we highly recommend updating {name} with ESPHome {version} or later. When updating the device from ESPHome earlier than 2022.12.0, it is recommended to use a serial cable instead of an over-the-air update to take advantage of the new partition scheme.",
"title": "Update {name} with ESPHome {version} or later"
},
"device_conflict": {

View File

@@ -10,7 +10,6 @@ from __future__ import annotations
from collections.abc import Callable
import logging
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
@@ -18,7 +17,7 @@ from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
from .const import DOMAIN, EVENT_LABS_UPDATED, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
from .models import (
EventLabsUpdatedData,
LabPreviewFeature,

View File

@@ -11,4 +11,6 @@ DOMAIN = "labs"
STORAGE_KEY = "core.labs"
STORAGE_VERSION = 1
EVENT_LABS_UPDATED = "labs_updated"
LABS_DATA: HassKey[LabsData] = HassKey(DOMAIN)
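
Not part of the diff: a hedged sketch of how a component-local event constant such as EVENT_LABS_UPDATED is fired and subscribed to on the Home Assistant event bus. The listener and the event payload below are invented for illustration.

from homeassistant.core import Event, HomeAssistant, callback

EVENT_LABS_UPDATED = "labs_updated"  # component-local constant, as in labs const.py


@callback
def _labs_updated_listener(event: Event) -> None:
    """Illustrative listener for labs preview feature toggles."""
    # The event.data layout here is assumed for the sketch.
    print("labs feature toggled:", event.data)


def wire_up(hass: HomeAssistant) -> None:
    """Subscribe to and fire the event (sketch only)."""
    unsub = hass.bus.async_listen(EVENT_LABS_UPDATED, _labs_updated_listener)
    hass.bus.async_fire(EVENT_LABS_UPDATED, {"feature": "example", "enabled": True})
    unsub()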

View File

@@ -8,10 +8,9 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.backup import async_get_manager
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import HomeAssistant, callback
from .const import LABS_DATA
from .const import EVENT_LABS_UPDATED, LABS_DATA
from .models import EventLabsUpdatedData

View File

@@ -1486,7 +1486,6 @@ class MqttEntity(
entity_registry.async_update_entity(
self.entity_id, new_entity_id=self._update_registry_entity_id
)
self._update_registry_entity_id = None
await super().async_added_to_hass()
self._subscriptions = {}

View File

@@ -729,8 +729,8 @@
"data_description": {
"payload_reset_percentage": "A special payload that resets the fan speed percentage state attribute to unknown when received at the percentage state topic.",
"percentage_command_template": "A [template]({command_templating_url}) to compose the payload to be published at the percentage command topic.",
"percentage_command_topic": "The MQTT topic to publish commands to change the fan speed state based on a percentage setting. The value shall be in the range from \"speed range min\" to \"speed range max\". [Learn more.]({url}#percentage_command_topic)",
"percentage_state_topic": "The MQTT topic subscribed to receive fan speed state. This is a value in the range from \"speed range min\" to \"speed range max\". [Learn more.]({url}#percentage_state_topic)",
"percentage_command_topic": "The MQTT topic to publish commands to change the fan speed state based on a percentage. [Learn more.]({url}#percentage_command_topic)",
"percentage_state_topic": "The MQTT topic subscribed to receive fan speed based on percentage. [Learn more.]({url}#percentage_state_topic)",
"percentage_value_template": "Defines a [template]({value_templating_url}) to extract the speed percentage value.",
"speed_range_max": "The maximum of numeric output range (representing 100 %). The percentage step is 100 / number of speeds within the \"speed range\".",
"speed_range_min": "The minimum of numeric output range (off not included, so speed_range_min - 1 represents 0 %). The percentage step is 100 / the number of speeds within the \"speed range\"."

View File

@@ -19,7 +19,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.8.4",
"python-roborock==3.8.1",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -30,7 +30,7 @@ from .entity import (
ShellyRpcAttributeEntity,
ShellySleepingBlockAttributeEntity,
ShellySleepingRpcAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rest,
async_setup_entry_rpc,
)
@@ -372,7 +372,7 @@ def _async_setup_block_entry(
) -> None:
"""Set up entities for BLOCK device."""
if config_entry.data[CONF_SLEEP_PERIOD]:
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,
@@ -380,7 +380,7 @@ def _async_setup_block_entry(
BlockSleepingBinarySensor,
)
else:
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,

View File

@@ -27,7 +27,7 @@ from .entity import (
RpcEntityDescription,
ShellyBlockAttributeEntity,
ShellyRpcAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rpc,
rpc_call,
)
@@ -81,7 +81,7 @@ def _async_setup_block_entry(
coordinator = config_entry.runtime_data.block
assert coordinator
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, BLOCK_COVERS, BlockShellyCover
)

View File

@@ -34,14 +34,14 @@ from .utils import (
@callback
def async_setup_entry_block(
def async_setup_entry_attribute_entities(
hass: HomeAssistant,
config_entry: ShellyConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: Mapping[tuple[str, str], BlockEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up block entities."""
"""Set up entities for attributes."""
coordinator = config_entry.runtime_data.block
assert coordinator
if coordinator.device.initialized:
@@ -150,7 +150,7 @@ def async_setup_entry_rpc(
sensors: Mapping[str, RpcEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up RPC entities."""
"""Set up entities for RPC sensors."""
coordinator = config_entry.runtime_data.rpc
assert coordinator

View File

@@ -18,6 +18,7 @@ from homeassistant.components.event import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
BASIC_INPUTS_EVENTS_TYPES,
@@ -25,7 +26,7 @@ from .const import (
SHIX3_1_INPUTS_EVENTS_TYPES,
)
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
from .entity import ShellyBlockEntity, ShellyRpcEntity
from .entity import ShellyBlockEntity, get_entity_rpc_device_info
from .utils import (
async_remove_orphaned_entities,
async_remove_shelly_entity,
@@ -135,7 +136,7 @@ def _async_setup_rpc_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up entities for RPC device."""
entities: list[ShellyRpcEvent | ShellyRpcScriptEvent] = []
entities: list[ShellyRpcEvent] = []
coordinator = config_entry.runtime_data.rpc
if TYPE_CHECKING:
@@ -161,9 +162,7 @@ def _async_setup_rpc_entry(
continue
if script_events and (event_types := script_events[get_rpc_key_id(script)]):
entities.append(
ShellyRpcScriptEvent(coordinator, script, SCRIPT_EVENT, event_types)
)
entities.append(ShellyRpcScriptEvent(coordinator, script, event_types))
# If a script is removed, from the device configuration, we need to remove orphaned entities
async_remove_orphaned_entities(
@@ -228,7 +227,7 @@ class ShellyBlockEvent(ShellyBlockEntity, EventEntity):
self.async_write_ha_state()
class ShellyRpcEvent(ShellyRpcEntity, EventEntity):
class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
"""Represent RPC event entity."""
_attr_has_entity_name = True
@@ -241,19 +240,25 @@ class ShellyRpcEvent(ShellyRpcEntity, EventEntity):
description: ShellyRpcEventDescription,
) -> None:
"""Initialize Shelly entity."""
super().__init__(coordinator, key)
super().__init__(coordinator)
self._attr_device_info = get_entity_rpc_device_info(coordinator, key)
self._attr_unique_id = f"{coordinator.mac}-{key}"
self.entity_description = description
_, component, component_id = get_rpc_key(key)
if custom_name := get_rpc_custom_name(coordinator.device, key):
self._attr_name = custom_name
else:
self._attr_translation_placeholders = {
"input_number": component_id
if get_rpc_number_of_channels(coordinator.device, component) > 1
else ""
}
self.event_id = int(component_id)
if description.key == "input":
_, component, component_id = get_rpc_key(key)
if custom_name := get_rpc_custom_name(coordinator.device, key):
self._attr_name = custom_name
else:
self._attr_translation_placeholders = {
"input_number": component_id
if get_rpc_number_of_channels(coordinator.device, component) > 1
else ""
}
self.event_id = int(component_id)
elif description.key == "script":
self._attr_name = get_rpc_custom_name(coordinator.device, key)
self.event_id = get_rpc_key_id(key)
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
@@ -265,36 +270,30 @@ class ShellyRpcEvent(ShellyRpcEntity, EventEntity):
@callback
def _async_handle_event(self, event: dict[str, Any]) -> None:
"""Handle the event."""
"""Handle the demo button event."""
if event["id"] == self.event_id:
self._trigger_event(event["event"])
self.async_write_ha_state()
class ShellyRpcScriptEvent(ShellyRpcEntity, EventEntity):
class ShellyRpcScriptEvent(ShellyRpcEvent):
"""Represent RPC script event entity."""
_attr_has_entity_name = True
entity_description: ShellyRpcEventDescription
def __init__(
self,
coordinator: ShellyRpcCoordinator,
key: str,
description: ShellyRpcEventDescription,
event_types: list[str],
) -> None:
"""Initialize Shelly script event entity."""
super().__init__(coordinator, key)
self.entity_description = description
self._attr_event_types = event_types
super().__init__(coordinator, key, SCRIPT_EVENT)
self._attr_name = get_rpc_custom_name(coordinator.device, key)
self.event_id = get_rpc_key_id(key)
self.component = key
self._attr_event_types = event_types
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
await super(CoordinatorEntity, self).async_added_to_hass()
self.async_on_remove(
self.coordinator.async_subscribe_events(self._async_handle_event)
@@ -303,7 +302,7 @@ class ShellyRpcScriptEvent(ShellyRpcEntity, EventEntity):
@callback
def _async_handle_event(self, event: dict[str, Any]) -> None:
"""Handle script event."""
if event.get("component") == self.key:
if event.get("component") == self.component:
event_type = event.get("event")
if event_type not in self.event_types:
# This can happen if we didn't find this event type in the script
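
Not part of the diff: a framework-free sketch of the super(CoordinatorEntity, self) call that ShellyRpcScriptEvent uses above. Passing an explicit class to super() starts the method lookup after that class in the instance's MRO, which lets the subclass skip an intermediate override. The class names below are invented for illustration.

import asyncio


class Base:
    async def added(self) -> None:
        print("Base.added")


class CoordinatorLike(Base):
    async def added(self) -> None:
        print("CoordinatorLike.added")
        await super().added()


class RpcEventLike(CoordinatorLike):
    pass


class ScriptEventLike(RpcEventLike):
    async def added(self) -> None:
        # Start the MRO lookup after CoordinatorLike, skipping its override,
        # analogous to super(CoordinatorEntity, self).async_added_to_hass().
        await super(CoordinatorLike, self).added()


asyncio.run(ScriptEventLike().added())  # prints only "Base.added"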

View File

@@ -44,7 +44,7 @@ from .entity import (
RpcEntityDescription,
ShellyBlockAttributeEntity,
ShellyRpcAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rpc,
)
from .utils import (
@@ -101,7 +101,7 @@ def _async_setup_block_entry(
coordinator = config_entry.runtime_data.block
assert coordinator
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, BLOCK_LIGHTS, BlockShellyLight
)

View File

@@ -42,7 +42,7 @@ from .entity import (
RpcEntityDescription,
ShellyRpcAttributeEntity,
ShellySleepingBlockAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rpc,
rpc_call,
)
@@ -353,7 +353,7 @@ def _async_setup_block_entry(
) -> None:
"""Set up entities for BLOCK device."""
if config_entry.data[CONF_SLEEP_PERIOD]:
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,

View File

@@ -53,7 +53,7 @@ from .entity import (
ShellyRpcAttributeEntity,
ShellySleepingBlockAttributeEntity,
ShellySleepingRpcAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rest,
async_setup_entry_rpc,
get_entity_rpc_device_info,
@@ -1736,7 +1736,7 @@ def _async_setup_block_entry(
) -> None:
"""Set up entities for BLOCK device."""
if config_entry.data[CONF_SLEEP_PERIOD]:
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,
@@ -1744,7 +1744,7 @@ def _async_setup_block_entry(
BlockSleepingSensor,
)
else:
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,

View File

@@ -36,7 +36,7 @@ from .entity import (
ShellyBlockAttributeEntity,
ShellyRpcAttributeEntity,
ShellySleepingBlockAttributeEntity,
async_setup_entry_block,
async_setup_entry_attribute_entities,
async_setup_entry_rpc,
rpc_call,
)
@@ -337,11 +337,11 @@ def _async_setup_block_entry(
coordinator = config_entry.runtime_data.block
assert coordinator
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, BLOCK_RELAY_SWITCHES, BlockRelaySwitch
)
async_setup_entry_block(
async_setup_entry_attribute_entities(
hass,
config_entry,
async_add_entities,

View File

@@ -39,7 +39,6 @@ from .entity import (
)
_KEY_DOOR = "door"
PARALLEL_UPDATES = 0
@dataclasses.dataclass(frozen=True, kw_only=True)

View File

@@ -33,7 +33,6 @@ from .entity import (
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)

View File

@@ -32,7 +32,6 @@ from .entity import ProtectDeviceEntity
from .utils import get_camera_base_name
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
@callback

View File

@@ -40,8 +40,6 @@ from .data import (
)
from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin
PARALLEL_UPDATES = 0
# Select best thumbnail
# Prefer thumbnails with LPR data, sorted by confidence

View File

@@ -15,7 +15,6 @@ from .data import ProtectDeviceType, UFPConfigEntry
from .entity import ProtectDeviceEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
async def async_setup_entry(

View File

@@ -20,7 +20,6 @@ from .data import ProtectDeviceType, UFPConfigEntry
from .entity import ProtectDeviceEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
async def async_setup_entry(

View File

@@ -40,7 +40,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"requirements": ["uiprotect==7.31.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==7.29.0", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -27,7 +27,6 @@ from .data import ProtectDeviceType, UFPConfigEntry
from .entity import ProtectDeviceEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
_SPEAKER_DESCRIPTION = MediaPlayerEntityDescription(
key="speaker",

View File

@@ -29,8 +29,6 @@ from .entity import (
async_all_device_entities,
)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class ProtectNumberEntityDescription(

View File

@@ -45,7 +45,6 @@ from .utils import async_get_light_motion_current
_LOGGER = logging.getLogger(__name__)
_KEY_LIGHT_MOTION = "light_motion"
PARALLEL_UPDATES = 0
HDR_MODES = [
{"id": "always", "name": "Always On"},

View File

@@ -55,7 +55,6 @@ from .utils import async_get_light_motion_current
_LOGGER = logging.getLogger(__name__)
OBJECT_TYPE_NONE = "none"
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)

View File

@@ -36,7 +36,6 @@ from .entity import (
ATTR_PREV_MIC = "prev_mic_level"
ATTR_PREV_RECORD = "prev_record_mode"
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)

View File

@@ -27,8 +27,6 @@ from .entity import (
async_all_device_entities,
)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class ProtectTextEntityDescription(ProtectSetableKeysMixin[T], TextEntityDescription):

View File

@@ -24,9 +24,14 @@ from homeassistant.helpers.trigger import (
async_get_all_descriptions as async_get_all_trigger_descriptions,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
_LOGGER = logging.getLogger(__name__)
FLAT_SERVICE_DESCRIPTIONS_CACHE: HassKey[
tuple[dict[str, dict[str, Any]], dict[str, dict[str, Any] | None]]
] = HassKey("websocket_automation_flat_service_description_cache")
@dataclass(slots=True, kw_only=True)
class _EntityFilter:
@@ -217,12 +222,29 @@ async def async_get_services_for_target(
) -> set[str]:
"""Get services for a target."""
descriptions = await async_get_all_service_descriptions(hass)
# Flatten dicts to be keyed by domain.name to match trigger/condition format
descriptions_flatten = {
f"{domain}.{service_name}": desc
for domain, services in descriptions.items()
for service_name, desc in services.items()
}
def get_flattened_service_descriptions() -> dict[str, dict[str, Any] | None]:
"""Get flattened service descriptions, with caching."""
if FLAT_SERVICE_DESCRIPTIONS_CACHE in hass.data:
cached_descriptions, cached_flat_descriptions = hass.data[
FLAT_SERVICE_DESCRIPTIONS_CACHE
]
# If the descriptions are the same, return the cached flattened version
if cached_descriptions is descriptions:
return cached_flat_descriptions
# Flatten dicts to be keyed by domain.name to match trigger/condition format
flat_descriptions = {
f"{domain}.{service_name}": desc
for domain, services in descriptions.items()
for service_name, desc in services.items()
}
hass.data[FLAT_SERVICE_DESCRIPTIONS_CACHE] = (
descriptions,
flat_descriptions,
)
return flat_descriptions
return _async_get_automation_components_for_target(
hass, target_selector, expand_group, descriptions_flatten
hass, target_selector, expand_group, get_flattened_service_descriptions()
)
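
Not part of the diff: a framework-free sketch of the caching idea this commit introduces, using only what the hunk shows. The flattened "domain.service" mapping is stored next to the descriptions dict it was built from, and an identity (is) check decides whether the cached result can be reused. In the integration the cache lives in hass.data under a HassKey rather than a module global, but the invalidation works the same way: when the service descriptions are rebuilt, a new dict object is returned, the identity check fails, and the flattened mapping is recomputed (as the test near the end of this compare also exercises).

from typing import Any

# (descriptions the cache was built from, flattened result)
_cache: tuple[dict[str, dict[str, Any]], dict[str, Any]] | None = None


def flatten_service_descriptions(
    descriptions: dict[str, dict[str, Any]],
) -> dict[str, Any]:
    """Flatten {domain: {service: desc}} into {"domain.service": desc}, with caching."""
    global _cache
    if _cache is not None and _cache[0] is descriptions:
        # Same dict object as last time: nothing was rebuilt, reuse the result.
        return _cache[1]
    flat = {
        f"{domain}.{service}": desc
        for domain, services in descriptions.items()
        for service, desc in services.items()
    }
    _cache = (descriptions, flat)
    return flat


descriptions = {"light": {"turn_on": {"fields": {}}}}
assert flatten_service_descriptions(descriptions) is flatten_service_descriptions(descriptions)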

View File

@@ -271,7 +271,6 @@ EVENT_HOMEASSISTANT_STOP: EventType[NoEventData] = EventType("homeassistant_stop
EVENT_HOMEASSISTANT_FINAL_WRITE: EventType[NoEventData] = EventType(
"homeassistant_final_write"
)
EVENT_LABS_UPDATED: Final = "labs_updated"
EVENT_LOGBOOK_ENTRY: Final = "logbook_entry"
EVENT_LOGGING_CHANGED: Final = "logging_changed"
EVENT_SERVICE_REGISTERED: Final = "service_registered"

requirements_all.txt (generated)
View File

@@ -504,7 +504,7 @@ anova-wifi==0.17.0
anthemav==1.4.1
# homeassistant.components.anthropic
anthropic==0.75.0
anthropic==0.73.0
# homeassistant.components.mcp_server
anyio==4.10.0
@@ -2560,7 +2560,7 @@ python-rabbitair==0.0.8
python-ripple-api==0.0.3
# homeassistant.components.roborock
python-roborock==3.8.4
python-roborock==3.8.1
# homeassistant.components.smarttub
python-smarttub==0.0.45
@@ -3053,7 +3053,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==7.31.0
uiprotect==7.29.0
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -480,7 +480,7 @@ anova-wifi==0.17.0
anthemav==1.4.1
# homeassistant.components.anthropic
anthropic==0.75.0
anthropic==0.73.0
# homeassistant.components.mcp_server
anyio==4.10.0
@@ -2138,7 +2138,7 @@ python-pooldose==0.8.0
python-rabbitair==0.0.8
# homeassistant.components.roborock
python-roborock==3.8.4
python-roborock==3.8.1
# homeassistant.components.smarttub
python-smarttub==0.0.45
@@ -2538,7 +2538,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==7.31.0
uiprotect==7.29.0
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -128,12 +128,6 @@ async def mock_init_component(
"""Initialize integration."""
model_list = AsyncPage(
data=[
ModelInfo(
id="claude-opus-4-5-20251101",
created_at=datetime.datetime(2025, 11, 1, 0, 0, tzinfo=datetime.UTC),
display_name="Claude Opus 4.5",
type="model",
),
ModelInfo(
id="claude-haiku-4-5-20251001",
created_at=datetime.datetime(2025, 10, 15, 0, 0, tzinfo=datetime.UTC),

View File

@@ -357,10 +357,6 @@ async def test_model_list(
assert options["type"] == FlowResultType.FORM
assert options["step_id"] == "advanced"
assert options["data_schema"].schema["chat_model"].config["options"] == [
{
"label": "Claude Opus 4.5",
"value": "claude-opus-4-5",
},
{
"label": "Claude Haiku 4.5",
"value": "claude-haiku-4-5",
@@ -383,11 +379,11 @@ async def test_model_list(
},
{
"label": "Claude Sonnet 3.7",
"value": "claude-3-7-sonnet-latest",
"value": "claude-3-7-sonnet",
},
{
"label": "Claude Haiku 3.5",
"value": "claude-3-5-haiku-latest",
"value": "claude-3-5-haiku",
},
{
"label": "Claude Haiku 3",

View File

@@ -1584,7 +1584,6 @@ async def test_discovery_with_object_id(
async def test_discovery_with_default_entity_id_for_previous_deleted_entity(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
entity_registry: er.EntityRegistry,
) -> None:
"""Test discovering an MQTT entity with default_entity_id and unique_id."""
@@ -1599,7 +1598,6 @@ async def test_discovery_with_default_entity_id_for_previous_deleted_entity(
)
initial_entity_id = "sensor.hello_id"
new_entity_id = "sensor.updated_hello_id"
later_entity_id = "sensor.later_hello_id"
name = "Hello World 11"
domain = "sensor"
@@ -1628,14 +1626,6 @@ async def test_discovery_with_default_entity_id_for_previous_deleted_entity(
assert state.name == name
assert (domain, "object bla") in hass.data["mqtt"].discovery_already_discovered
# Assert the entity ID can be changed later
entity_registry.async_update_entity(new_entity_id, new_entity_id=later_entity_id)
await hass.async_block_till_done()
state = hass.states.get(later_entity_id)
assert state is not None
assert state.name == name
async def test_discovery_incl_nodeid(
hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator

View File

@@ -4162,3 +4162,81 @@ async def test_get_services_for_target(
"switch.turn_on",
],
)
@patch("annotatedyaml.loader.load_yaml")
@patch.object(Integration, "has_services", return_value=True)
async def test_get_services_for_target_caching(
mock_has_services: Mock,
mock_load_yaml: Mock,
hass: HomeAssistant,
websocket_client: MockHAClientWebSocket,
) -> None:
"""Test that flattened service descriptions are cached and reused."""
def get_common_service_descriptions(domain: str):
return f"""
turn_on:
target:
entity:
domain: {domain}
"""
def _load_yaml(fname, secrets=None):
domain = fname.split("/")[-2]
with io.StringIO(get_common_service_descriptions(domain)) as file:
return parse_yaml(file)
mock_load_yaml.side_effect = _load_yaml
await hass.async_block_till_done()
hass.services.async_register("light", "turn_on", lambda call: None)
hass.services.async_register("switch", "turn_on", lambda call: None)
await hass.async_block_till_done()
async def call_command():
await websocket_client.send_json_auto_id(
{
"type": "get_services_for_target",
"target": {"entity_id": ["light.test1"]},
}
)
msg = await websocket_client.receive_json()
assert msg["success"]
with patch(
"homeassistant.components.websocket_api.automation._async_get_automation_components_for_target",
return_value=set(),
) as mock_get_components:
# First call: should create and cache flat descriptions
await call_command()
assert mock_get_components.call_count == 1
first_flat_descriptions = mock_get_components.call_args_list[0][0][3]
assert first_flat_descriptions == {
"light.turn_on": {
"fields": {},
"target": {"entity": [{"domain": ["light"]}]},
},
"switch.turn_on": {
"fields": {},
"target": {"entity": [{"domain": ["switch"]}]},
},
}
# Second call: should reuse cached flat descriptions
await call_command()
assert mock_get_components.call_count == 2
second_flat_descriptions = mock_get_components.call_args_list[1][0][3]
assert first_flat_descriptions is second_flat_descriptions
# Register a new service to invalidate cache
hass.services.async_register("new_domain", "new_service", lambda call: None)
await hass.async_block_till_done()
# Third call: cache should be rebuilt
await call_command()
assert mock_get_components.call_count == 3
third_flat_descriptions = mock_get_components.call_args_list[2][0][3]
assert "new_domain.new_service" in third_flat_descriptions
assert third_flat_descriptions is not first_flat_descriptions