Handle entity creation on newly added zwave_js value (#55987)
* Handle new entity creation when a new value is added
* spacing
* Update homeassistant/components/zwave_js/__init__.py
  Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
* change variable name and use asyncio.gather
* Centralized where discovered value IDs get managed

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
parent ac1251c52b
commit c785983cce
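To summarize the change before the diff: discovery used to run only once per node, when the node became ready. This commit factors discovery down to the single-value level, tracks which value IDs have already been discovered per device, and registers node event listeners so a value that appears after startup still produces an entity. The sketch below illustrates that pattern in isolation; it is not Home Assistant code, and the Node class, the discover_single_value / discover_node_values helpers, and the node-ID-keyed discovered_value_ids dict are simplified stand-ins for the real async_discover_single_value, async_discover_node_values, and device-keyed bookkeeping added in this commit.

# Minimal sketch (not Home Assistant code): per-value discovery plus a
# "value added" listener, with a per-device set of already-discovered value
# IDs so reinterviews and repeated events never create duplicate entities.
from collections import defaultdict
from typing import Callable


class Node:
    """Stand-in for a Z-Wave JS node that emits events to subscribers."""

    def __init__(self, node_id: int) -> None:
        self.node_id = node_id
        self.values: dict[str, dict] = {}  # value_id -> value data
        self._listeners: dict[str, list[Callable]] = defaultdict(list)

    def on(self, event: str, callback: Callable) -> None:
        self._listeners[event].append(callback)

    def emit(self, event: str, data: dict) -> None:
        for callback in self._listeners[event]:
            callback(data)


# One set of seen value IDs per node, mirroring discovered_value_ids
discovered_value_ids: dict[int, set[str]] = defaultdict(set)


def discover_single_value(node: Node, value_id: str) -> None:
    """Create an entity for a single value unless it was already processed."""
    if value_id in discovered_value_ids[node.node_id]:
        return  # already discovered, skip
    discovered_value_ids[node.node_id].add(value_id)
    print(f"creating entity for node {node.node_id}, value {value_id}")


def discover_node_values(node: Node) -> None:
    """Initial discovery: walk every value the node currently exposes."""
    for value_id in node.values:
        discover_single_value(node, value_id)


node = Node(node_id=52)
node.values["52-49-0-Air temperature"] = {"value": 21.5}
discover_node_values(node)

# Values that show up later go through the same single-value path.
node.on("value added", lambda event: discover_single_value(node, event["value_id"]))
node.values["52-49-10-Ultraviolet"] = {"value": 0}
node.emit("value added", {"value_id": "52-49-10-Ultraviolet"})
# Emitting the same event again is a no-op thanks to discovered_value_ids.
node.emit("value added", {"value_id": "52-49-10-Ultraviolet"})

In the real integration the per-value handler additionally awaits platform setup and sends a dispatcher signal to create the entity, and the listeners are registered for "value added", "value updated", and "metadata updated" so late metadata changes can also surface new entities, as the diff below shows.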
homeassistant/components/zwave_js/__init__.py

@@ -68,7 +68,11 @@ from .const import (
     ZWAVE_JS_VALUE_NOTIFICATION_EVENT,
     ZWAVE_JS_VALUE_UPDATED_EVENT,
 )
-from .discovery import ZwaveDiscoveryInfo, async_discover_values
+from .discovery import (
+    ZwaveDiscoveryInfo,
+    async_discover_node_values,
+    async_discover_single_value,
+)
 from .helpers import async_enable_statistics, get_device_id, get_unique_id
 from .migrate import async_migrate_discovered_value
 from .services import ZWaveServices
@@ -129,13 +133,60 @@ async def async_setup_entry(  # noqa: C901
     entry_hass_data[DATA_PLATFORM_SETUP] = {}

     registered_unique_ids: dict[str, dict[str, set[str]]] = defaultdict(dict)
+    discovered_value_ids: dict[str, set[str]] = defaultdict(set)

+    async def async_handle_discovery_info(
+        device: device_registry.DeviceEntry,
+        disc_info: ZwaveDiscoveryInfo,
+        value_updates_disc_info: dict[str, ZwaveDiscoveryInfo],
+    ) -> None:
+        """Handle discovery info and all dependent tasks."""
+        # This migration logic was added in 2021.3 to handle a breaking change to
+        # the value_id format. Some time in the future, this call (as well as the
+        # helper functions) can be removed.
+        async_migrate_discovered_value(
+            hass,
+            ent_reg,
+            registered_unique_ids[device.id][disc_info.platform],
+            device,
+            client,
+            disc_info,
+        )
+
+        platform_setup_tasks = entry_hass_data[DATA_PLATFORM_SETUP]
+        platform = disc_info.platform
+        if platform not in platform_setup_tasks:
+            platform_setup_tasks[platform] = hass.async_create_task(
+                hass.config_entries.async_forward_entry_setup(entry, platform)
+            )
+        await platform_setup_tasks[platform]
+
+        LOGGER.debug("Discovered entity: %s", disc_info)
+        async_dispatcher_send(
+            hass, f"{DOMAIN}_{entry.entry_id}_add_{platform}", disc_info
+        )
+
+        # If we don't need to watch for updates, return early
+        if not disc_info.assumed_state:
+            return
+        value_updates_disc_info[disc_info.primary_value.value_id] = disc_info
+        # If this is not the first time we found a value we want to watch for
+        # updates, return early (the "value updated" listener is already registered)
+        if len(value_updates_disc_info) != 1:
+            return
+        # add listener for value updated events
+        entry.async_on_unload(
+            disc_info.node.on(
+                "value updated",
+                lambda event: async_on_value_updated_fire_event(
+                    value_updates_disc_info, event["value"]
+                ),
+            )
+        )
+
     async def async_on_node_ready(node: ZwaveNode) -> None:
         """Handle node ready event."""
         LOGGER.debug("Processing node %s", node)
-
-        platform_setup_tasks = entry_hass_data[DATA_PLATFORM_SETUP]
-
         # register (or update) node in device registry
         device = register_node_in_dev_reg(hass, entry, dev_reg, client, node)
         # We only want to create the defaultdict once, even on reinterviews
@@ -145,44 +196,22 @@ async def async_setup_entry(  # noqa: C901
         value_updates_disc_info: dict[str, ZwaveDiscoveryInfo] = {}

         # run discovery on all node values and create/update entities
-        for disc_info in async_discover_values(node, device):
-            platform = disc_info.platform
-
-            # This migration logic was added in 2021.3 to handle a breaking change to
-            # the value_id format. Some time in the future, this call (as well as the
-            # helper functions) can be removed.
-            async_migrate_discovered_value(
-                hass,
-                ent_reg,
-                registered_unique_ids[device.id][platform],
-                device,
-                client,
-                disc_info,
-            )
-
-            if platform not in platform_setup_tasks:
-                platform_setup_tasks[platform] = hass.async_create_task(
-                    hass.config_entries.async_forward_entry_setup(entry, platform)
-                )
-
-            await platform_setup_tasks[platform]
-
-            LOGGER.debug("Discovered entity: %s", disc_info)
-            async_dispatcher_send(
-                hass, f"{DOMAIN}_{entry.entry_id}_add_{platform}", disc_info
-            )
-
-            # Capture discovery info for values we want to watch for updates
-            if disc_info.assumed_state:
-                value_updates_disc_info[disc_info.primary_value.value_id] = disc_info
-
-        # add listener for value updated events if necessary
-        if value_updates_disc_info:
-            entry.async_on_unload(
-                node.on(
-                    "value updated",
-                    lambda event: async_on_value_updated(
-                        value_updates_disc_info, event["value"]
-                    ),
-                )
-            )
+        await asyncio.gather(
+            *(
+                async_handle_discovery_info(device, disc_info, value_updates_disc_info)
+                for disc_info in async_discover_node_values(
+                    node, device, discovered_value_ids
+                )
+            )
+        )
+
+        # add listeners to handle new values that get added later
+        for event in ("value added", "value updated", "metadata updated"):
+            entry.async_on_unload(
+                node.on(
+                    event,
+                    lambda event: hass.async_create_task(
+                        async_on_value_added(value_updates_disc_info, event["value"])
+                    ),
+                )
+            )
@@ -238,6 +267,31 @@ async def async_setup_entry(  # noqa: C901
         # some visual feedback that something is (in the process of) being added
         register_node_in_dev_reg(hass, entry, dev_reg, client, node)

+    async def async_on_value_added(
+        value_updates_disc_info: dict[str, ZwaveDiscoveryInfo], value: Value
+    ) -> None:
+        """Handle an added or updated value on an already ready node."""
+        # If node isn't ready or a device for this node doesn't already exist, we can
+        # let the node ready event handler perform discovery. If a value has already
+        # been processed, we don't need to do it again
+        device_id = get_device_id(client, value.node)
+        if (
+            not value.node.ready
+            or not (device := dev_reg.async_get_device({device_id}))
+            or value.value_id in discovered_value_ids[device.id]
+        ):
+            return
+
+        LOGGER.debug("Processing node %s added value %s", value.node, value)
+        await asyncio.gather(
+            *(
+                async_handle_discovery_info(device, disc_info, value_updates_disc_info)
+                for disc_info in async_discover_single_value(
+                    value, device, discovered_value_ids
+                )
+            )
+        )
+
     @callback
     def async_on_node_removed(node: ZwaveNode) -> None:
         """Handle node removed event."""
@@ -247,6 +301,7 @@ async def async_setup_entry(  # noqa: C901
         # note: removal of entity registry entry is handled by core
         dev_reg.async_remove_device(device.id)  # type: ignore
         registered_unique_ids.pop(device.id, None)  # type: ignore
+        discovered_value_ids.pop(device.id, None)  # type: ignore

     @callback
     def async_on_value_notification(notification: ValueNotification) -> None:
@@ -313,7 +368,7 @@ async def async_setup_entry(  # noqa: C901
         hass.bus.async_fire(ZWAVE_JS_NOTIFICATION_EVENT, event_data)

     @callback
-    def async_on_value_updated(
+    def async_on_value_updated_fire_event(
         value_updates_disc_info: dict[str, ZwaveDiscoveryInfo], value: Value
     ) -> None:
         """Fire value updated event."""
homeassistant/components/zwave_js/discovery.py

@@ -671,11 +671,23 @@ DISCOVERY_SCHEMAS = [


 @callback
-def async_discover_values(
-    node: ZwaveNode, device: DeviceEntry
+def async_discover_node_values(
+    node: ZwaveNode, device: DeviceEntry, discovered_value_ids: dict[str, set[str]]
 ) -> Generator[ZwaveDiscoveryInfo, None, None]:
     """Run discovery on ZWave node and return matching (primary) values."""
     for value in node.values.values():
+        # We don't need to rediscover an already processed value_id
+        if value.value_id in discovered_value_ids[device.id]:
+            continue
+        yield from async_discover_single_value(value, device, discovered_value_ids)
+
+
+@callback
+def async_discover_single_value(
+    value: ZwaveValue, device: DeviceEntry, discovered_value_ids: dict[str, set[str]]
+) -> Generator[ZwaveDiscoveryInfo, None, None]:
+    """Run discovery on a single ZWave value and return matching schema info."""
+    discovered_value_ids[device.id].add(value.value_id)
     for schema in DISCOVERY_SCHEMAS:
         # check manufacturer_id
         if (
@@ -744,14 +756,14 @@ def async_discover_values(

         # check additional required values
         if schema.required_values is not None and not all(
-            any(check_value(val, val_scheme) for val in node.values.values())
+            any(check_value(val, val_scheme) for val in value.node.values.values())
             for val_scheme in schema.required_values
         ):
             continue

         # check for values that may not be present
         if schema.absent_values is not None and any(
-            any(check_value(val, val_scheme) for val in node.values.values())
+            any(check_value(val, val_scheme) for val in value.node.values.values())
             for val_scheme in schema.absent_values
         ):
             continue
@@ -767,7 +779,7 @@ def async_discover_values(
                 "Discovery for value %s on device '%s' (%s) will be skipped: %s",
                 value,
                 device.name_by_user or device.name,
-                node,
+                value.node,
                 err,
             )
             continue
@@ -789,8 +801,8 @@ def async_discover_values(
         )

         if not schema.allow_multi:
-            # break out of loop, this value may not be discovered by other schemas/platforms
-            break
+            # return early since this value may not be discovered by other schemas/platforms
+            return


 @callback
tests/components/zwave_js/test_init.py

@@ -3,6 +3,7 @@ from copy import deepcopy
 from unittest.mock import call, patch

 import pytest
+from zwave_js_server.event import Event
 from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion
 from zwave_js_server.model.node import Node

@@ -124,6 +125,39 @@ async def test_listen_failure(hass, client, error):
     assert entry.state is ConfigEntryState.SETUP_RETRY


+async def test_new_entity_on_value_added(hass, multisensor_6, client, integration):
+    """Test we create a new entity if a value is added after the fact."""
+    node: Node = multisensor_6
+
+    # Add a value on a random endpoint so we can be sure we should get a new entity
+    event = Event(
+        type="value added",
+        data={
+            "source": "node",
+            "event": "value added",
+            "nodeId": node.node_id,
+            "args": {
+                "commandClassName": "Multilevel Sensor",
+                "commandClass": 49,
+                "endpoint": 10,
+                "property": "Ultraviolet",
+                "propertyName": "Ultraviolet",
+                "metadata": {
+                    "type": "number",
+                    "readable": True,
+                    "writeable": False,
+                    "label": "Ultraviolet",
+                    "ccSpecific": {"sensorType": 27, "scale": 0},
+                },
+                "value": 0,
+            },
+        },
+    )
+    node.receive_event(event)
+    await hass.async_block_till_done()
+    assert hass.states.get("sensor.multisensor_6_ultraviolet_10") is not None
+
+
 async def test_on_node_added_ready(hass, multisensor_6_state, client, integration):
     """Test we handle a ready node added event."""
     dev_reg = dr.async_get(hass)