Reinitialize zeroconf discovery flow on unignore (#125753)

* Reinitialize zeroconf discovery flow on unignore

* Adjust tests

* Improve comments

* Fix logic for updating discovery keys

* Add tests

* Use mock_config_flow helper in new config_entries test

* Add discovery_keys attribute to ConfigEntry

* Update zeroconf rediscovery

* Change type of ConfigEntry.discovery_keys

* Update tests

* Fix DiscoveryKey.from_json_dict and add tests

* Fix test

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
Erik Montnemery 2024-09-23 16:49:21 +02:00 committed by GitHub
parent f5852b4678
commit b2982c18bb
GPG Key ID: B5690EEEBB952194
97 changed files with 987 additions and 26 deletions
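
For orientation before the per-file diffs below: the change revolves around a small serializable DiscoveryKey record that discovery sources attach to the flows they create, and that ends up on the resulting ignore entry. The following sketch is a standalone illustration in plain Python; the dataclass mirrors the one added to helpers/discovery_flow.py, while the surrounding lines are illustrative only, not the Home Assistant API.

from dataclasses import dataclass


@dataclass(kw_only=True, slots=True)
class DiscoveryKey:
    """Serializable discovery key (mirrors the dataclass added in this commit)."""

    domain: str
    key: str | tuple[str, ...]
    version: int


# A zeroconf discovery identifies a service by (service type, service name).
key = DiscoveryKey(
    domain="zeroconf",
    key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
    version=1,
)

# The discovery source places the key in the flow context; when the user ignores
# the flow, the key is stored in ConfigEntry.discovery_keys of the ignore entry.
context = {"source": "zeroconf", "discovery_key": key}
print(context["discovery_key"].key[1])  # Shelly108._http._tcp.local.

Removing that ignore entry later lets the discovery source replay exactly these keys, which is what the zeroconf changes below implement.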

View File

@ -463,9 +463,12 @@ async def ignore_config_flow(
)
return
context = {"source": config_entries.SOURCE_IGNORE}
if "discovery_key" in flow["context"]:
context["discovery_key"] = flow["context"]["discovery_key"]
await hass.config_entries.flow.async_init(
flow["handler"],
context={"source": config_entries.SOURCE_IGNORE},
context=context,
data={"unique_id": flow["context"]["unique_id"], "title": msg["title"]},
)
connection.send_result(msg["id"])

View File

@ -33,6 +33,8 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.data_entry_flow import BaseServiceInfo
from homeassistant.helpers import discovery_flow, instance_id
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery_flow import DiscoveryKey
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import (
@ -379,11 +381,38 @@ class ZeroconfDiscovery:
self.zeroconf, types, handlers=[self.async_service_update]
)
async_dispatcher_connect(
self.hass,
config_entries.SIGNAL_CONFIG_ENTRY_CHANGED,
self._handle_config_entry_changed,
)
async def async_stop(self) -> None:
"""Cancel the service browser and stop processing the queue."""
if self.async_service_browser:
await self.async_service_browser.async_cancel()
@callback
def _handle_config_entry_changed(
self,
change: config_entries.ConfigEntryChange,
entry: config_entries.ConfigEntry,
) -> None:
"""Handle config entry changes."""
if (
change != config_entries.ConfigEntryChange.REMOVED
or entry.source != config_entries.SOURCE_IGNORE
or not (discovery_keys := entry.discovery_keys)
):
return
for discovery_key in discovery_keys:
if discovery_key.domain != DOMAIN or discovery_key.version != 1:
continue
_type = discovery_key.key[0]
name = discovery_key.key[1]
_LOGGER.debug("Rediscover unignored service %s.%s", _type, name)
self._async_service_update(self.zeroconf, _type, name)
def _async_dismiss_discoveries(self, name: str) -> None:
"""Dismiss all discoveries for the given name."""
for flow in self.hass.config_entries.flow.async_progress_by_init_data_type(
@ -412,6 +441,16 @@ class ZeroconfDiscovery:
self._async_dismiss_discoveries(name)
return
self._async_service_update(zeroconf, service_type, name)
@callback
def _async_service_update(
self,
zeroconf: HaZeroconf,
service_type: str,
name: str,
) -> None:
"""Service state added or changed."""
try:
async_service_info = AsyncServiceInfo(service_type, name)
except BadTypeInNameException as ex:
@ -453,6 +492,11 @@ class ZeroconfDiscovery:
return
_LOGGER.debug("Discovered new device %s %s", name, info)
props: dict[str, str | None] = info.properties
discovery_key = DiscoveryKey(
domain=DOMAIN,
key=(info.type, info.name),
version=1,
)
domain = None
# If we can handle it as a HomeKit discovery, we do that here.
@ -467,6 +511,7 @@ class ZeroconfDiscovery:
homekit_discovery.domain,
{"source": config_entries.SOURCE_HOMEKIT},
info,
discovery_key=discovery_key,
)
# Continue on here as homekit_controller
# still needs to get updates on devices
@ -515,6 +560,7 @@ class ZeroconfDiscovery:
matcher_domain,
context,
info,
discovery_key=discovery_key,
)
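
The rediscovery added in _handle_config_entry_changed above reduces to a filter over the removed entry's discovery keys. A minimal standalone restatement of that filter, with plain tuples standing in for DiscoveryKey instances and keys_to_rediscover as an illustrative name; the check for ConfigEntryChange.REMOVED is left to the caller in this sketch.

Key = tuple[str, tuple[str, str], int]  # (domain, (service_type, name), version)


def keys_to_rediscover(
    entry_source: str, discovery_keys: tuple[Key, ...]
) -> list[tuple[str, str]]:
    """Return the (service_type, name) pairs to feed back into the service handler."""
    if entry_source != "ignore":
        # Only removal of an ignored entry triggers rediscovery.
        return []
    return [
        key
        for domain, key, version in discovery_keys
        if domain == "zeroconf" and version == 1
    ]


print(
    keys_to_rediscover(
        "ignore",
        (
            ("zeroconf", ("_http._tcp.local.", "Shelly108._http._tcp.local."), 1),
            ("bluetooth", ("other", "key"), 1),
        ),
    )
)
# [('_http._tcp.local.', 'Shelly108._http._tcp.local.')]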

View File

@ -49,6 +49,7 @@ from .exceptions import (
)
from .helpers import device_registry, entity_registry, issue_registry as ir, storage
from .helpers.debounce import Debouncer
from .helpers.discovery_flow import DiscoveryKey
from .helpers.dispatcher import SignalType, async_dispatcher_send_internal
from .helpers.event import (
RANDOM_MICROSECOND_MAX,
@ -120,7 +121,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry()
STORAGE_KEY = "core.config_entries"
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 3
STORAGE_VERSION_MINOR = 4
SAVE_DELAY = 1
@ -317,6 +318,7 @@ class ConfigEntry(Generic[_DataT]):
_tries: int
created_at: datetime
modified_at: datetime
discovery_keys: tuple[DiscoveryKey, ...]
def __init__(
self,
@ -324,6 +326,7 @@ class ConfigEntry(Generic[_DataT]):
created_at: datetime | None = None,
data: Mapping[str, Any],
disabled_by: ConfigEntryDisabler | None = None,
discovery_keys: tuple[DiscoveryKey, ...],
domain: str,
entry_id: str | None = None,
minor_version: int,
@ -422,6 +425,7 @@ class ConfigEntry(Generic[_DataT]):
_setter(self, "_tries", 0)
_setter(self, "created_at", created_at or utcnow())
_setter(self, "modified_at", modified_at or utcnow())
_setter(self, "discovery_keys", discovery_keys)
def __repr__(self) -> str:
"""Representation of ConfigEntry."""
@ -951,6 +955,7 @@ class ConfigEntry(Generic[_DataT]):
return {
"created_at": self.created_at.isoformat(),
"data": dict(self.data),
"discovery_keys": self.discovery_keys,
"disabled_by": self.disabled_by,
"domain": self.domain,
"entry_id": self.entry_id,
@ -1364,6 +1369,30 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]):
ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id)
if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
# If there's an ignored config entry with a matching unique ID,
# update the discovery key.
if (
(discovery_key := flow.context.get("discovery_key"))
and (unique_id := flow.unique_id) is not None
and (
entry := self.config_entries.async_entry_for_domain_unique_id(
result["handler"], unique_id
)
)
and entry.source == SOURCE_IGNORE
and discovery_key not in (known_discovery_keys := entry.discovery_keys)
):
new_discovery_keys = tuple([*known_discovery_keys, discovery_key][-10:])
_LOGGER.debug(
"Updating discovery keys for %s entry %s %s -> %s",
entry.domain,
unique_id,
known_discovery_keys,
new_discovery_keys,
)
self.config_entries.async_update_entry(
entry, discovery_keys=new_discovery_keys
)
return result
# Avoid adding a config entry for an integration
@ -1420,8 +1449,11 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]):
if existing_entry is not None and existing_entry.state.recoverable:
await self.config_entries.async_unload(existing_entry.entry_id)
discovery_key = flow.context.get("discovery_key")
discovery_keys = (discovery_key,) if discovery_key else ()
entry = ConfigEntry(
data=result["data"],
discovery_keys=discovery_keys,
domain=result["handler"],
minor_version=result["minor_version"],
options=result["options"],
@ -1649,6 +1681,11 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
for entry in data["entries"]:
entry["created_at"] = entry["modified_at"] = created_at
if old_minor_version < 4:
# Version 1.4 adds discovery_keys
for entry in data["entries"]:
entry["discovery_keys"] = []
if old_major_version > 1:
raise NotImplementedError
return data
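
A sketch of what the new 1.4 migration step above does to previously stored entries, written as a standalone function; the name _migrate_to_minor_4 and the bare dict input are illustrative, but the "entries" list layout matches the loop in the diff.

def _migrate_to_minor_4(data: dict) -> dict:
    """Add an empty discovery_keys list to every stored config entry (storage 1.4)."""
    for entry in data["entries"]:
        entry["discovery_keys"] = []
    return data


print(_migrate_to_minor_4({"entries": [{"domain": "sun", "data": {}}]}))
# {'entries': [{'domain': 'sun', 'data': {}, 'discovery_keys': []}]}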
@ -1836,6 +1873,9 @@ class ConfigEntries:
created_at=datetime.fromisoformat(entry["created_at"]),
data=entry["data"],
disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]),
discovery_keys=tuple(
DiscoveryKey.from_json_dict(key) for key in entry["discovery_keys"]
),
domain=entry["domain"],
entry_id=entry_id,
minor_version=entry["minor_version"],
@ -1992,6 +2032,7 @@ class ConfigEntries:
entry: ConfigEntry,
*,
data: Mapping[str, Any] | UndefinedType = UNDEFINED,
discovery_keys: tuple[DiscoveryKey, ...] | UndefinedType = UNDEFINED,
minor_version: int | UndefinedType = UNDEFINED,
options: Mapping[str, Any] | UndefinedType = UNDEFINED,
pref_disable_new_entities: bool | UndefinedType = UNDEFINED,
@ -2021,6 +2062,7 @@ class ConfigEntries:
changed = True
for attr, value in (
("discovery_keys", discovery_keys),
("minor_version", minor_version),
("pref_disable_new_entities", pref_disable_new_entities),
("pref_disable_polling", pref_disable_polling),
@ -2451,7 +2493,20 @@ class ConfigFlow(ConfigEntryBaseFlow):
]
async def async_step_ignore(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Ignore this config flow."""
"""Ignore this config flow.
Ignoring a config flow works by creating a config entry with source set to
SOURCE_IGNORE.
There will only be a single active discovery flow per device, even when the
integration has multiple discovery sources for the same device. This method
is called when the user ignores a discovered device or service; the discovery
key of the ignored flow is then stored.
Once the ignore config entry is created, ConfigEntriesFlowManager.async_finish_flow
keeps the discovery key up to date, since, unlike the unique id, it may not be
stable.
"""
await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False)
return self.async_create_entry(title=user_input["title"], data={})
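
Earlier in this file's diff, async_finish_flow appends a newly seen discovery key to the matching ignored entry and caps the stored keys at the ten most recent. The bookkeeping in isolation, as a pure function with strings standing in for DiscoveryKey instances (the name and signature are illustrative; the duplicate check and the -10 slice match the diff):

def updated_discovery_keys(known: tuple[str, ...], new: str) -> tuple[str, ...]:
    """Append a newly seen key, skipping duplicates, and keep at most the 10 most recent."""
    if new in known:
        return known
    return tuple([*known, new][-10:])


known = tuple(str(i) for i in range(1, 11))  # "1" .. "10"
print(updated_discovery_keys(known, "11"))
# ('2', '3', '4', '5', '6', '7', '8', '9', '10', '11')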

View File

@ -3,25 +3,49 @@
from __future__ import annotations
from collections.abc import Coroutine
from typing import Any, NamedTuple
import dataclasses
from typing import TYPE_CHECKING, Any, NamedTuple, Self
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CoreState, Event, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.async_ import gather_with_limited_concurrency
from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigFlowResult
FLOW_INIT_LIMIT = 20
DISCOVERY_FLOW_DISPATCHER: HassKey[FlowDispatcher] = HassKey(
"discovery_flow_dispatcher"
)
@dataclasses.dataclass(kw_only=True, slots=True)
class DiscoveryKey:
"""Serializable discovery key."""
domain: str
key: str | tuple[str, ...]
version: int
@classmethod
def from_json_dict(cls, json_dict: dict[str, Any]) -> Self:
"""Construct from JSON dict."""
if type(key := json_dict["key"]) is list:
key = tuple(key)
return cls(domain=json_dict["domain"], key=key, version=json_dict["version"])
@bind_hass
@callback
def async_create_flow(
hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any
hass: HomeAssistant,
domain: str,
context: dict[str, Any],
data: Any,
*,
discovery_key: DiscoveryKey | None = None,
) -> None:
"""Create a discovery flow."""
dispatcher: FlowDispatcher | None = None
@ -31,6 +55,9 @@ def async_create_flow(
dispatcher = hass.data[DISCOVERY_FLOW_DISPATCHER] = FlowDispatcher(hass)
dispatcher.async_setup()
if discovery_key:
context = context | {"discovery_key": discovery_key}
if not dispatcher or dispatcher.started:
if init_coro := _async_init_flow(hass, domain, context, data):
hass.async_create_background_task(
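
Because DiscoveryKey is persisted in config entry storage, it must survive a JSON round trip; from_json_dict above restores tuple keys that JSON flattened to lists. A standalone round trip using only the standard library (the real code goes through Home Assistant's JSON helpers, so treat this as an approximation):

import dataclasses
import json
from typing import Any, Self


@dataclasses.dataclass(kw_only=True, slots=True)
class DiscoveryKey:
    """Serializable discovery key, as added in this file."""

    domain: str
    key: str | tuple[str, ...]
    version: int

    @classmethod
    def from_json_dict(cls, json_dict: dict[str, Any]) -> Self:
        """Construct from a JSON dict, restoring tuple keys from JSON lists."""
        if type(key := json_dict["key"]) is list:
            key = tuple(key)
        return cls(domain=json_dict["domain"], key=key, version=json_dict["version"])


original = DiscoveryKey(
    domain="zeroconf",
    key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
    version=1,
)
serialized = json.dumps(dataclasses.asdict(original))
restored = DiscoveryKey.from_json_dict(json.loads(serialized))
assert restored == original

This mirrors test_discovery_key_serialize_deserialize further down, which exercises the same round trip through the helpers.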

View File

@ -990,6 +990,7 @@ class MockConfigEntry(config_entries.ConfigEntry):
*,
data=None,
disabled_by=None,
discovery_keys=(),
domain="test",
entry_id=None,
minor_version=1,
@ -1007,6 +1008,7 @@ class MockConfigEntry(config_entries.ConfigEntry):
kwargs = {
"data": data or {},
"disabled_by": disabled_by,
"discovery_keys": discovery_keys,
"domain": domain,
"entry_id": entry_id or ulid_util.ulid_now(),
"minor_version": minor_version,

View File

@ -11,6 +11,8 @@
'name': 'AEMET',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'aemet',
'entry_id': '7442b231f139e813fc1939281123f220',
'minor_version': 1,

View File

@ -9,6 +9,8 @@
'name': 'Home',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airly',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -24,6 +24,8 @@
'longitude': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airnow',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -36,6 +36,8 @@
'longitude': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airvisual',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -91,6 +91,8 @@
'password': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airvisual_pro',
'entry_id': '6a2b3770e53c28dc1eeb2515e906b0ce',
'minor_version': 1,

View File

@ -238,6 +238,8 @@
'port': 3000,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airzone',
'entry_id': '6e7a0798c1734ba81d26ced0e690eaec',
'minor_version': 1,

View File

@ -91,6 +91,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'airzone_cloud',
'entry_id': 'd186e31edb46d64d14b9b2f11f1ebd9f',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'app_key': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'ambient_station',
'entry_id': '382cf7643f016fd48b3fe52163fe8877',
'minor_version': 1,

View File

@ -36,4 +36,4 @@ async def test_diagnostics(
hass, hass_client, mock_config_entry
)
assert result["entry"] == entry_dict
assert result["entry"] == entry_dict | {"discovery_keys": []}

View File

@ -38,4 +38,4 @@ async def test_diagnostics(
hass, hass_client, mock_config_entry
)
assert result["entry"] == entry_dict
assert result["entry"] == entry_dict | {"discovery_keys": []}

View File

@ -37,6 +37,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'axis',
'entry_id': '676abe5b73621446e6550a2e86ffe3dd',
'minor_version': 1,

View File

@ -38,6 +38,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'blink',
'minor_version': 1,
'options': dict({

View File

@ -9,6 +9,8 @@
'use_psk': True,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'braviatv',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'location': '',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'co2signal',
'entry_id': '904a74160aa6f335526706bee85dfb83',
'minor_version': 1,

View File

@ -30,6 +30,8 @@
'api_token': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'coinbase',
'entry_id': '080272b77a4f80c41b94d7cdc86fd826',
'minor_version': 1,

View File

@ -17,6 +17,7 @@ from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_entry_flow, config_validation as cv
from homeassistant.helpers.discovery_flow import DiscoveryKey
from homeassistant.loader import IntegrationNotFound
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
@ -1317,8 +1318,27 @@ async def test_disable_entry_nonexisting(
assert response["error"]["code"] == "not_found"
@pytest.mark.parametrize(
(
"flow_context",
"entry_discovery_keys",
),
[
(
{},
(),
),
(
{"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)},
(DiscoveryKey(domain="test", key="blah", version=1),),
),
],
)
async def test_ignore_flow(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
flow_context: dict,
entry_discovery_keys: tuple,
) -> None:
"""Test we can ignore a flow."""
assert await async_setup_component(hass, "config", {})
@ -1341,7 +1361,7 @@ async def test_ignore_flow(
with patch.dict(HANDLERS, {"test": TestFlow}):
result = await hass.config_entries.flow.async_init(
"test", context={"source": core_ce.SOURCE_USER}
"test", context={"source": core_ce.SOURCE_USER} | flow_context
)
assert result["type"] is FlowResultType.FORM
@ -1363,6 +1383,8 @@ async def test_ignore_flow(
assert entry.source == "ignore"
assert entry.unique_id == "mock-unique-id"
assert entry.title == "Test Integration"
assert entry.data == {}
assert entry.discovery_keys == entry_discovery_keys
async def test_ignore_flow_nonexisting(

View File

@ -10,6 +10,8 @@
'port': 80,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'deconz',
'entry_id': '1',
'minor_version': 1,

View File

@ -38,6 +38,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'devolo_home_control',
'entry_id': '123456',
'minor_version': 1,

View File

@ -22,6 +22,8 @@
'password': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'devolo_home_network',
'entry_id': '123456',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'dsmr_reader',
'entry_id': 'TEST_ENTRY_ID',
'minor_version': 1,

View File

@ -8,6 +8,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'ecovacs',
'minor_version': 1,
'options': dict({
@ -59,6 +61,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'ecovacs',
'minor_version': 1,
'options': dict({

View File

@ -24,6 +24,8 @@
'port': 9123,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'elgato',
'entry_id': <ANY>,
'minor_version': 1,
@ -67,6 +69,8 @@
'port': 9123,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'elgato',
'entry_id': <ANY>,
'minor_version': 1,
@ -109,6 +113,8 @@
'port': 9123,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'elgato',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -18,6 +18,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'energyzero',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -10,6 +10,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'enphase_envoy',
'entry_id': '45a36e55aaddb2007c5f6602e0c38e72',
'minor_version': 1,
@ -441,6 +443,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'enphase_envoy',
'entry_id': '45a36e55aaddb2007c5f6602e0c38e72',
'minor_version': 1,
@ -913,6 +917,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'enphase_envoy',
'entry_id': '45a36e55aaddb2007c5f6602e0c38e72',
'minor_version': 1,

View File

@ -10,6 +10,8 @@
'port': 6053,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'esphome',
'entry_id': '08d821dc059cf4f645cb024d32c8e708',
'minor_version': 1,

View File

@ -70,6 +70,7 @@ async def test_diagnostics_with_bluetooth(
"port": 6053,
},
"disabled_by": None,
"discovery_keys": [],
"domain": "esphome",
"entry_id": ANY,
"minor_version": 1,

View File

@ -6,6 +6,8 @@
'longitude': 4.42,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'forecast_solar',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -52,6 +52,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'fritz',
'minor_version': 1,
'options': dict({

View File

@ -30,4 +30,4 @@ async def test_entry_diagnostics(
result = await get_diagnostics_for_config_entry(hass, hass_client, entries[0])
assert result == {"entry": entry_dict, "data": {}}
assert result == {"entry": entry_dict | {"discovery_keys": []}, "data": {}}

View File

@ -7,6 +7,8 @@
'is_logger': True,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'fronius',
'entry_id': 'f1e2b9837e8adaed6fa682acaa216fd8',
'minor_version': 1,

View File

@ -9,6 +9,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'fyta',
'entry_id': 'ce5f5431554d101905d31797e1232da8',
'minor_version': 2,

View File

@ -39,6 +39,8 @@
'address': '00000000-0000-0000-0000-000000000001',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'gardena_bluetooth',
'entry_id': <ANY>,
'minor_version': 1,
@ -248,6 +250,8 @@
'address': '00000000-0000-0000-0000-000000000001',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'gardena_bluetooth',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'station_id': 123,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'gios',
'entry_id': '86129426118ae32020417a53712d6eef',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'model_family': 'ET',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'goodwe',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -6,6 +6,8 @@
'project_id': '1234',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'google_assistant',
'minor_version': 1,
'options': dict({

View File

@ -41,6 +41,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"data": {
"valve_controller": {

View File

@ -20,6 +20,8 @@
'ip_address': '127.0.0.1',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'homewizard',
'entry_id': <ANY>,
'minor_version': 1,
@ -62,6 +64,8 @@
'ip_address': '127.0.0.1',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'homewizard',
'entry_id': <ANY>,
'minor_version': 1,
@ -104,6 +108,8 @@
'ip_address': '127.0.0.1',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'homewizard',
'entry_id': <ANY>,
'minor_version': 1,
@ -142,6 +148,8 @@
'ip_address': '2.2.2.2',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'homewizard',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -175,6 +175,8 @@
}),
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'husqvarna_automower',
'entry_id': 'automower_test',
'minor_version': 1,

View File

@ -6,6 +6,8 @@
'station_id': '123',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'imgw_pib',
'minor_version': 1,
'options': dict({

View File

@ -348,6 +348,8 @@
'zip_code': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'iqvia',
'entry_id': '690ac4b7e99855fc5ee7b987a758d5cb',
'minor_version': 1,

View File

@ -56,6 +56,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"client": {
"version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'",

View File

@ -15,6 +15,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'lacrosse_view',
'entry_id': 'lacrosse_view_test_entry_id',
'minor_version': 1,

View File

@ -63,6 +63,8 @@
'site_id': 'test-site-id',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'linear_garage_door',
'entry_id': 'acefdd4b3a4a0911067d1cf51414201e',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'port': 44077,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'madvr',
'entry_id': '3bd2acb0e4f0476d40865546d0d91132',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'melcloud',
'entry_id': 'TEST_ENTRY_ID',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'mac': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'modern_forms',
'minor_version': 1,
'options': dict({

View File

@ -18,6 +18,8 @@
'mac_code': 'CCCC',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'motionblinds_ble',
'entry_id': 'mock_entry_id',
'minor_version': 1,

View File

@ -608,6 +608,8 @@
'webhook_id': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'netatmo',
'minor_version': 1,
'options': dict({

View File

@ -7,6 +7,8 @@
'profile_id': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'nextdns',
'entry_id': 'd9aa37407ddac7b964a99e86312288d6',
'minor_version': 1,

View File

@ -37,6 +37,8 @@
'refresh_token': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'nice_go',
'entry_id': 'acefdd4b3a4a0911067d1cf51414201e',
'minor_version': 1,

View File

@ -36,6 +36,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"data": {
"bridges": [

View File

@ -39,5 +39,5 @@ async def test_diagnostics(
result = await get_diagnostics_for_config_entry(
hass, hass_client, mock_config_entry
)
assert result["entry"] == entry_dict
assert result["entry"] == entry_dict | {"discovery_keys": []}
assert result["nut_data"] == nut_data_dict

View File

@ -11,6 +11,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'onvif',
'entry_id': '1',
'minor_version': 1,

View File

@ -38,6 +38,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"data": {
"protection_window": {

View File

@ -85,6 +85,8 @@
}),
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'philips_js',
'minor_version': 1,
'options': dict({

View File

@ -23,6 +23,8 @@
'verify_ssl': True,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'pi_hole',
'entry_id': 'pi_hole_mock_entry',
'minor_version': 1,

View File

@ -93,6 +93,8 @@
'zone': 'zone.home',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'proximity',
'minor_version': 1,
'options': dict({

View File

@ -37,6 +37,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"data": {
"fields": [

View File

@ -27,7 +27,7 @@ async def test_entry_diagnostics(
config_entry_dict["data"][CONF_CLOUD_ID] = REDACTED
assert result == {
"config_entry": config_entry_dict,
"config_entry": config_entry_dict | {"discovery_keys": []},
"data": {
var["Name"]: var["Value"]
for var in MOCK_200_RESPONSE_WITHOUT_PRICE.values()

View File

@ -40,7 +40,7 @@ async def test_entry_diagnostics_no_meters(
config_entry_dict["data"][CONF_MAC] = REDACTED
assert result == {
"config_entry": config_entry_dict,
"config_entry": config_entry_dict | {"discovery_keys": []},
"data": {
"Meters": {},
"NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED},
@ -58,7 +58,7 @@ async def test_entry_diagnostics(
config_entry_dict["data"][CONF_MAC] = REDACTED
assert result == {
"config_entry": config_entry_dict,
"config_entry": config_entry_dict | {"discovery_keys": []},
"data": {
"Meters": {
"**REDACTED0**": {

View File

@ -1131,6 +1131,8 @@
'ssl': True,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'rainmachine',
'entry_id': '81bd010ed0a63b705f6da8407cb26d4b',
'minor_version': 1,
@ -2260,6 +2262,8 @@
'ssl': True,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'rainmachine',
'entry_id': '81bd010ed0a63b705f6da8407cb26d4b',
'minor_version': 1,

View File

@ -33,6 +33,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"data": [
{

View File

@ -34,6 +34,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'ridwell',
'entry_id': '11554ec901379b9cc8f5a6c1d11ce978',
'minor_version': 1,

View File

@ -42,6 +42,7 @@ async def test_entry_diagnostics(
"token": REDACTED,
},
"disabled_by": None,
"discovery_keys": [],
"domain": "samsungtv",
"entry_id": "123456",
"minor_version": 2,
@ -81,6 +82,7 @@ async def test_entry_diagnostics_encrypted(
"session_id": REDACTED,
},
"disabled_by": None,
"discovery_keys": [],
"domain": "samsungtv",
"entry_id": "123456",
"minor_version": 2,
@ -119,6 +121,7 @@ async def test_entry_diagnostics_encrypte_offline(
"session_id": REDACTED,
},
"disabled_by": None,
"discovery_keys": [],
"domain": "samsungtv",
"entry_id": "123456",
"minor_version": 2,

View File

@ -7,6 +7,8 @@
'port': 80,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'screenlogic',
'entry_id': 'screenlogictest',
'minor_version': 1,

View File

@ -45,7 +45,7 @@ async def test_block_config_entry_diagnostics(
result = await get_diagnostics_for_config_entry(hass, hass_client, entry)
assert result == {
"entry": entry_dict,
"entry": entry_dict | {"discovery_keys": []},
"bluetooth": "not initialized",
"device_info": {
"name": "Test name",
@ -105,7 +105,7 @@ async def test_rpc_config_entry_diagnostics(
result = await get_diagnostics_for_config_entry(hass, hass_client, entry)
assert result == {
"entry": entry_dict,
"entry": entry_dict | {"discovery_keys": []},
"bluetooth": {
"scanner": {
"connectable": False,

View File

@ -31,6 +31,7 @@ async def test_entry_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
"subscription_data": {
"12345": {

View File

@ -9,6 +9,8 @@
'password': 'pwd',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'solarlog',
'entry_id': 'ce5f5431554d101905d31797e1232da8',
'minor_version': 3,

View File

@ -68,5 +68,6 @@ async def test_diagnostics(
"disabled_by": None,
"created_at": ANY,
"modified_at": ANY,
"discovery_keys": [],
},
}

View File

@ -34,6 +34,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'systemmonitor',
'minor_version': 3,
'options': dict({

View File

@ -22,6 +22,8 @@
'token': '987654',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'tailwind',
'entry_id': <ANY>,
'minor_version': 1,
@ -66,6 +68,8 @@
'token': '987654',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'tailwind',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -26,6 +26,8 @@
]),
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'tankerkoenig',
'entry_id': '8036b4412f2fae6bb9dbab7fe8e37f87',
'minor_version': 1,

View File

@ -7,6 +7,8 @@
'password': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'tractive',
'entry_id': '3bd2acb0e4f0476d40865546d0d91921',
'minor_version': 1,

View File

@ -14,6 +14,8 @@
'user_code': '12345',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'tuya',
'entry_id': <ANY>,
'minor_version': 1,
@ -42,6 +44,8 @@
'user_code': '12345',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'tuya',
'entry_id': <ANY>,
'minor_version': 1,
@ -93,6 +97,8 @@
'user_code': '12345',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'tuya',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -26,6 +26,8 @@
'post_code': '1234AB',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'twentemilieu',
'entry_id': <ANY>,
'minor_version': 1,
@ -70,6 +72,8 @@
'post_code': '1234AB',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'twentemilieu',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -27,6 +27,8 @@
'name': 'twinkly_test_device_name',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'twinkly',
'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2',
'minor_version': 1,

View File

@ -27,6 +27,8 @@
'verify_ssl': False,
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'unifi',
'entry_id': '1',
'minor_version': 1,

View File

@ -17,6 +17,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'uptime',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -5,6 +5,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'utility_meter',
'minor_version': 1,
'options': dict({

View File

@ -6,6 +6,8 @@
'host': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'v2c',
'entry_id': 'da58ee91f38c2406c2a36d0a1a7f8569',
'minor_version': 1,

View File

@ -4721,6 +4721,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'vicare',
'entry_id': '1234',
'minor_version': 1,

View File

@ -18,6 +18,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'watttime',
'minor_version': 1,
'options': dict({

View File

@ -237,6 +237,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'webmin',
'entry_id': '**REDACTED**',
'minor_version': 1,

View File

@ -60,5 +60,6 @@ async def test_diagnostics(
"disabled_by": None,
"created_at": entry.created_at.isoformat(),
"modified_at": entry.modified_at.isoformat(),
"discovery_keys": [],
},
}

View File

@ -29,6 +29,8 @@
'username': '**REDACTED**',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'whirlpool',
'minor_version': 1,
'options': dict({

View File

@ -20,6 +20,8 @@
'domain': 'example.com',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'whois',
'entry_id': <ANY>,
'minor_version': 1,
@ -58,6 +60,8 @@
'domain': 'example.com',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'whois',
'entry_id': <ANY>,
'minor_version': 1,
@ -96,6 +100,8 @@
'domain': 'example.com',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'whois',
'entry_id': <ANY>,
'minor_version': 1,
@ -134,6 +140,8 @@
'domain': 'example.com',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'whois',
'entry_id': <ANY>,
'minor_version': 1,
@ -172,6 +180,8 @@
'domain': 'example.com',
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'whois',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -26,6 +26,8 @@
'port': 10200,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'wyoming',
'entry_id': <ANY>,
'minor_version': 1,
@ -70,6 +72,8 @@
'port': 10200,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'wyoming',
'entry_id': <ANY>,
'minor_version': 1,
@ -114,6 +118,8 @@
'port': 12345,
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'wyoming',
'entry_id': <ANY>,
'minor_version': 1,

View File

@ -12,6 +12,7 @@ from zeroconf import (
)
from zeroconf.asyncio import AsyncServiceInfo
from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
@ -22,8 +23,11 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.generated import zeroconf as zc_gen
from homeassistant.helpers.discovery_flow import DiscoveryKey
from homeassistant.setup import ATTR_COMPONENT, async_setup_component
from tests.common import MockConfigEntry, MockModule, mock_integration
NON_UTF8_VALUE = b"ABCDEF\x8a"
NON_ASCII_KEY = b"non-ascii-key\x8a"
PROPERTIES = {
@ -303,7 +307,14 @@ async def test_zeroconf_match_macaddress(hass: HomeAssistant) -> None:
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
assert mock_config_flow.mock_calls[0][2]["context"] == {"source": "zeroconf"}
assert mock_config_flow.mock_calls[0][2]["context"] == {
"discovery_key": DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
"source": "zeroconf",
}
@pytest.mark.usefixtures("mock_async_zeroconf")
@ -542,6 +553,11 @@ async def test_homekit_match_partial_space(hass: HomeAssistant) -> None:
assert mock_config_flow.mock_calls[1][2]["context"] == {
"source": "zeroconf",
"alternative_domain": "lifx",
"discovery_key": DiscoveryKey(
domain="zeroconf",
key=("_hap._tcp.local.", "_name._hap._tcp.local."),
version=1,
),
}
@ -1381,3 +1397,353 @@ async def test_zeroconf_removed(hass: HomeAssistant) -> None:
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_async_progress_by_init_data_type.mock_calls) == 1
assert mock_async_abort.mock_calls[0][1][0] == "mock_flow_id"
@pytest.mark.usefixtures("mock_async_zeroconf")
@pytest.mark.parametrize(
(
"entry_domain",
"entry_discovery_keys",
),
[
# Matching discovery key
(
"shelly",
(
DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
),
),
# Matching discovery key
(
"shelly",
(
DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
DiscoveryKey(
domain="other",
key="blah",
version=1,
),
),
),
# Matching discovery key, other domain
# Note: Rediscovery is not currently restricted to the domain of the removed
# entry. Such a check can be added if needed.
(
"comp",
(
DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
),
),
],
)
async def test_zeroconf_rediscover(
hass: HomeAssistant,
entry_domain: str,
entry_discovery_keys: tuple,
) -> None:
"""Test we reinitiate flows when an ignored config entry is removed."""
def http_only_service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
handlers[0](
zeroconf,
"_http._tcp.local.",
"Shelly108._http._tcp.local.",
ServiceStateChange.Added,
)
entry = MockConfigEntry(
domain=entry_domain,
discovery_keys=entry_discovery_keys,
unique_id="mock-unique-id",
state=config_entries.ConfigEntryState.LOADED,
source=config_entries.SOURCE_IGNORE,
)
entry.add_to_hass(hass)
with (
patch.dict(
zc_gen.ZEROCONF,
{
"_http._tcp.local.": [
{
"domain": "shelly",
"name": "shelly*",
"properties": {"macaddress": "ffaadd*"},
}
]
},
clear=True,
),
patch.object(hass.config_entries.flow, "async_init") as mock_config_flow,
patch.object(
zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock
) as mock_service_browser,
patch(
"homeassistant.components.zeroconf.AsyncServiceInfo",
side_effect=get_zeroconf_info_mock("FFAADDCC11DD"),
),
):
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
expected_context = {
"discovery_key": DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
"source": "zeroconf",
}
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
assert mock_config_flow.mock_calls[0][2]["context"] == expected_context
await hass.config_entries.async_remove(entry.entry_id)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 3
assert mock_config_flow.mock_calls[1][1][0] == entry_domain
assert mock_config_flow.mock_calls[1][2]["context"] == {
"source": "unignore",
}
assert mock_config_flow.mock_calls[2][1][0] == "shelly"
assert mock_config_flow.mock_calls[2][2]["context"] == expected_context
@pytest.mark.usefixtures("mock_async_zeroconf")
@pytest.mark.parametrize(
(
"entry_domain",
"entry_discovery_keys",
"entry_source",
"entry_unique_id",
),
[
# Discovery key from other domain
(
"shelly",
(
DiscoveryKey(
domain="bluetooth",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
),
# Discovery key from the future
(
"shelly",
(
DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=2,
),
),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
),
],
)
async def test_zeroconf_rediscover_no_match(
hass: HomeAssistant,
entry_domain: str,
entry_discovery_keys: tuple,
entry_source: str,
entry_unique_id: str,
) -> None:
"""Test we don't reinitiate flows when a non matching config entry is removed."""
def http_only_service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
handlers[0](
zeroconf,
"_http._tcp.local.",
"Shelly108._http._tcp.local.",
ServiceStateChange.Added,
)
hass.config.components.add(entry_domain)
mock_integration(hass, MockModule(entry_domain))
entry = MockConfigEntry(
domain=entry_domain,
discovery_keys=entry_discovery_keys,
unique_id=entry_unique_id,
state=config_entries.ConfigEntryState.LOADED,
source=entry_source,
)
entry.add_to_hass(hass)
with (
patch.dict(
zc_gen.ZEROCONF,
{
"_http._tcp.local.": [
{
"domain": "shelly",
"name": "shelly*",
"properties": {"macaddress": "ffaadd*"},
}
]
},
clear=True,
),
patch.object(hass.config_entries.flow, "async_init") as mock_config_flow,
patch.object(
zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock
) as mock_service_browser,
patch(
"homeassistant.components.zeroconf.AsyncServiceInfo",
side_effect=get_zeroconf_info_mock("FFAADDCC11DD"),
),
):
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
expected_context = {
"discovery_key": DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
"source": "zeroconf",
}
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
assert mock_config_flow.mock_calls[0][2]["context"] == expected_context
await hass.config_entries.async_remove(entry.entry_id)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 2
assert mock_config_flow.mock_calls[1][1][0] == entry_domain
assert mock_config_flow.mock_calls[1][2]["context"] == {
"source": "unignore",
}
@pytest.mark.usefixtures("mock_async_zeroconf")
@pytest.mark.parametrize(
(
"entry_domain",
"entry_discovery_keys",
"entry_source",
"entry_unique_id",
),
[
# Source not SOURCE_IGNORE
(
"shelly",
(
DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
),
config_entries.SOURCE_ZEROCONF,
"mock-unique-id",
),
],
)
async def test_zeroconf_rediscover_no_match_2(
hass: HomeAssistant,
entry_domain: str,
entry_discovery_keys: tuple,
entry_source: str,
entry_unique_id: str,
) -> None:
"""Test we don't reinitiate flows when a non matching config entry is removed.
This test can be merged with test_zeroconf_rediscover_no_match when
async_step_unignore has been removed from the ConfigFlow base class.
"""
def http_only_service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
handlers[0](
zeroconf,
"_http._tcp.local.",
"Shelly108._http._tcp.local.",
ServiceStateChange.Added,
)
hass.config.components.add(entry_domain)
mock_integration(hass, MockModule(entry_domain))
entry = MockConfigEntry(
domain=entry_domain,
discovery_keys=entry_discovery_keys,
unique_id=entry_unique_id,
state=config_entries.ConfigEntryState.LOADED,
source=entry_source,
)
entry.add_to_hass(hass)
with (
patch.dict(
zc_gen.ZEROCONF,
{
"_http._tcp.local.": [
{
"domain": "shelly",
"name": "shelly*",
"properties": {"macaddress": "ffaadd*"},
}
]
},
clear=True,
),
patch.object(hass.config_entries.flow, "async_init") as mock_config_flow,
patch.object(
zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock
) as mock_service_browser,
patch(
"homeassistant.components.zeroconf.AsyncServiceInfo",
side_effect=get_zeroconf_info_mock("FFAADDCC11DD"),
),
):
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
expected_context = {
"discovery_key": DiscoveryKey(
domain="zeroconf",
key=("_http._tcp.local.", "Shelly108._http._tcp.local."),
version=1,
),
"source": "zeroconf",
}
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
assert mock_config_flow.mock_calls[0][2]["context"] == expected_context
await hass.config_entries.async_remove(entry.entry_id)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1

View File

@ -93,6 +93,8 @@
'radio_type': 'ezsp',
}),
'disabled_by': None,
'discovery_keys': list([
]),
'domain': 'zha',
'minor_version': 1,
'options': dict({

View File

@ -8,7 +8,8 @@ import pytest
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers import discovery_flow
from homeassistant.helpers import discovery_flow, json as json_helper
from homeassistant.helpers.discovery_flow import DiscoveryKey
@pytest.fixture
@ -20,8 +21,29 @@ def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]:
yield mock_init
@pytest.mark.parametrize(
("discovery_key", "context"),
[
(None, {}),
(
DiscoveryKey(domain="test", key="string_key", version=1),
{"discovery_key": DiscoveryKey(domain="test", key="string_key", version=1)},
),
(
DiscoveryKey(domain="test", key=("one", "two"), version=1),
{
"discovery_key": DiscoveryKey(
domain="test", key=("one", "two"), version=1
)
},
),
],
)
async def test_async_create_flow(
hass: HomeAssistant, mock_flow_init: AsyncMock
hass: HomeAssistant,
mock_flow_init: AsyncMock,
discovery_key: DiscoveryKey | None,
context: dict,
) -> None:
"""Test we can create a flow."""
discovery_flow.async_create_flow(
@ -29,11 +51,12 @@ async def test_async_create_flow(
"hue",
{"source": config_entries.SOURCE_HOMEKIT},
{"properties": {"id": "aa:bb:cc:dd:ee:ff"}},
discovery_key=discovery_key,
)
assert mock_flow_init.mock_calls == [
call(
"hue",
context={"source": "homekit"},
context={"source": "homekit"} | context,
data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
]
@ -118,3 +141,16 @@ async def test_async_create_flow_does_nothing_after_stop(
{"properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert len(mock_flow_init.mock_calls) == 0
@pytest.mark.parametrize("key", ["test", ("blah", "bleh")])
def test_discovery_key_serialize_deserialize(key: str | tuple[str, ...]) -> None:
"""Test serialize and deserialize discovery key."""
discovery_key_1 = discovery_flow.DiscoveryKey(
domain="test_domain", key=key, version=1
)
serialized = json_helper.json_dumps(discovery_key_1)
assert (
discovery_flow.DiscoveryKey.from_json_dict(json_helper.json_loads(serialized))
== discovery_key_1
)

View File

@ -5,6 +5,8 @@
'data': dict({
}),
'disabled_by': None,
'discovery_keys': tuple(
),
'domain': 'test',
'entry_id': 'mock-entry',
'minor_version': 1,

View File

@ -38,6 +38,7 @@ from homeassistant.exceptions import (
HomeAssistantError,
)
from homeassistant.helpers import entity_registry as er, issue_registry as ir
from homeassistant.helpers.discovery_flow import DiscoveryKey
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@ -884,10 +885,21 @@ async def test_saving_and_loading(
with patch("homeassistant.config_entries.HANDLERS.get", return_value=Test2Flow):
await hass.config_entries.flow.async_init(
"test", context={"source": config_entries.SOURCE_USER}
"test",
context={
"source": config_entries.SOURCE_USER,
"discovery_key": DiscoveryKey(domain="test", key=("blah"), version=1),
},
)
await hass.config_entries.flow.async_init(
"test",
context={
"source": config_entries.SOURCE_USER,
"discovery_key": DiscoveryKey(domain="test", key=("a", "b"), version=1),
},
)
assert len(hass.config_entries.async_entries()) == 2
assert len(hass.config_entries.async_entries()) == 3
entry_1 = hass.config_entries.async_entries()[0]
hass.config_entries.async_update_entry(
@ -906,7 +918,7 @@ async def test_saving_and_loading(
manager = config_entries.ConfigEntries(hass, {})
await manager.async_initialize()
assert len(manager.async_entries()) == 2
assert len(manager.async_entries()) == 3
# Ensure same order
for orig, loaded in zip(
@ -2739,8 +2751,24 @@ async def test_finish_flow_aborts_progress(
assert len(hass.config_entries.flow.async_progress()) == 0
@pytest.mark.parametrize(
("extra_context", "expected_entry_discovery_keys"),
[
(
{},
(),
),
(
{"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)},
(DiscoveryKey(domain="test", key="blah", version=1),),
),
],
)
async def test_unique_id_ignore(
hass: HomeAssistant, manager: config_entries.ConfigEntries
hass: HomeAssistant,
manager: config_entries.ConfigEntries,
extra_context: dict,
expected_entry_discovery_keys: tuple,
) -> None:
"""Test that we can ignore flows that are in progress and have a unique ID."""
async_setup_entry = AsyncMock(return_value=False)
@ -2766,7 +2794,7 @@ async def test_unique_id_ignore(
result2 = await manager.flow.async_init(
"comp",
context={"source": config_entries.SOURCE_IGNORE},
context={"source": config_entries.SOURCE_IGNORE} | extra_context,
data={"unique_id": "mock-unique-id", "title": "Ignored Title"},
)
@ -2782,6 +2810,8 @@ async def test_unique_id_ignore(
assert entry.source == "ignore"
assert entry.unique_id == "mock-unique-id"
assert entry.title == "Ignored Title"
assert entry.data == {}
assert entry.discovery_keys == expected_entry_discovery_keys
async def test_manual_add_overrides_ignored_entry(
@ -2878,6 +2908,184 @@ async def test_manual_add_overrides_ignored_entry_singleton(
assert p_entry.data == {"token": "supersecret"}
@pytest.mark.parametrize(
(
"discovery_keys",
"entry_source",
"entry_unique_id",
"flow_context",
"flow_source",
"flow_result",
"updated_discovery_keys",
),
[
# No discovery key
(
(),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
{},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
(),
),
# Discovery key added to ignored entry data
(
(),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
({"domain": "test", "key": "blah", "version": 1},),
),
# Discovery key added to ignored entry data
(
({"domain": "test", "key": "bleh", "version": 1},),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
(
{"domain": "test", "key": "bleh", "version": 1},
{"domain": "test", "key": "blah", "version": 1},
),
),
# Discovery key added to ignored entry data
(
(
{"domain": "test", "key": "1", "version": 1},
{"domain": "test", "key": "2", "version": 1},
{"domain": "test", "key": "3", "version": 1},
{"domain": "test", "key": "4", "version": 1},
{"domain": "test", "key": "5", "version": 1},
{"domain": "test", "key": "6", "version": 1},
{"domain": "test", "key": "7", "version": 1},
{"domain": "test", "key": "8", "version": 1},
{"domain": "test", "key": "9", "version": 1},
{"domain": "test", "key": "10", "version": 1},
),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
{"discovery_key": {"domain": "test", "key": "11", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
(
{"domain": "test", "key": "2", "version": 1},
{"domain": "test", "key": "3", "version": 1},
{"domain": "test", "key": "4", "version": 1},
{"domain": "test", "key": "5", "version": 1},
{"domain": "test", "key": "6", "version": 1},
{"domain": "test", "key": "7", "version": 1},
{"domain": "test", "key": "8", "version": 1},
{"domain": "test", "key": "9", "version": 1},
{"domain": "test", "key": "10", "version": 1},
{"domain": "test", "key": "11", "version": 1},
),
),
# Discovery key already in ignored entry data
(
({"domain": "test", "key": "blah", "version": 1},),
config_entries.SOURCE_IGNORE,
"mock-unique-id",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
({"domain": "test", "key": "blah", "version": 1},),
),
# Discovery key not added to user entry data
(
(),
config_entries.SOURCE_USER,
"mock-unique-id",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.ABORT,
(),
),
# Flow not aborted when unique id is not matching
(
(),
config_entries.SOURCE_IGNORE,
"mock-unique-id-2",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_ZEROCONF,
data_entry_flow.FlowResultType.FORM,
(),
),
# Flow not aborted when user initiated flow
(
(),
config_entries.SOURCE_IGNORE,
"mock-unique-id-2",
{"discovery_key": {"domain": "test", "key": "blah", "version": 1}},
config_entries.SOURCE_USER,
data_entry_flow.FlowResultType.FORM,
(),
),
],
)
async def test_ignored_entry_update_discovery_keys(
hass: HomeAssistant,
manager: config_entries.ConfigEntries,
discovery_keys: tuple,
entry_source: str,
entry_unique_id: str,
flow_context: dict,
flow_source: str,
flow_result: data_entry_flow.FlowResultType,
updated_discovery_keys: tuple,
) -> None:
"""Test that discovery keys of an ignored entry can be updated."""
hass.config.components.add("comp")
entry = MockConfigEntry(
domain="comp",
discovery_keys=discovery_keys,
unique_id=entry_unique_id,
state=config_entries.ConfigEntryState.LOADED,
source=entry_source,
)
entry.add_to_hass(hass)
mock_integration(hass, MockModule("comp"))
mock_platform(hass, "comp.config_flow", None)
class TestFlow(config_entries.ConfigFlow):
"""Test flow."""
VERSION = 1
async def async_step_user(self, user_input=None):
"""Test user step."""
await self.async_set_unique_id("mock-unique-id")
self._abort_if_unique_id_configured(reload_on_update=False)
return self.async_show_form(step_id="step2")
async def async_step_step2(self, user_input=None):
raise NotImplementedError
async def async_step_zeroconf(self, discovery_info=None):
"""Test zeroconf step."""
return await self.async_step_user(discovery_info)
with (
mock_config_flow("comp", TestFlow),
patch(
"homeassistant.config_entries.ConfigEntries.async_reload"
) as async_reload,
):
result = await manager.flow.async_init(
"comp", context={"source": flow_source} | flow_context
)
await hass.async_block_till_done()
assert result["type"] == flow_result
assert entry.data == {}
assert entry.discovery_keys == updated_discovery_keys
assert len(async_reload.mock_calls) == 0
async def test_async_current_entries_does_not_skip_ignore_non_user(
hass: HomeAssistant, manager: config_entries.ConfigEntries
) -> None:
@ -5043,6 +5251,7 @@ async def test_unhashable_unique_id(
entries = config_entries.ConfigEntryItems(hass)
entry = config_entries.ConfigEntry(
data={},
discovery_keys=(),
domain="test",
entry_id="mock_id",
minor_version=1,
@ -5075,6 +5284,7 @@ async def test_hashable_non_string_unique_id(
entries = config_entries.ConfigEntryItems(hass)
entry = config_entries.ConfigEntry(
data={},
discovery_keys=(),
domain="test",
entry_id="mock_id",
minor_version=1,
@ -5976,6 +6186,7 @@ async def test_migration_from_1_2(
"created_at": "1970-01-01T00:00:00+00:00",
"data": {},
"disabled_by": None,
"discovery_keys": [],
"domain": "sun",
"entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2",
"minor_version": 1,