Compare commits

...

44 Commits

Author SHA1 Message Date
Paulus Schoutsen
fbe1524f6c Bumped version to 2023.3.0b4 2023-02-26 22:37:34 -05:00
J. Nick Koston
95e337277c Avoid starting a bluetooth poll when Home Assistant is stopping (#88819)
* Avoid starting a bluetooth poll when Home Assistant is stopping

* tests
2023-02-26 22:37:26 -05:00
J. Nick Koston
1503674bd6 Prevent integrations from retrying setup once shutdown has started (#88818)
* Prevent integrations from retrying setup once shutdown has started

* coverage
2023-02-26 22:37:25 -05:00
J. Nick Koston
ab6bd75b70 Fix flux_led discovery running at shutdown (#88817) 2023-02-26 22:37:24 -05:00
J. Nick Koston
2fff836bd4 Fix lock services not removing entity fields (#88805) 2023-02-26 22:37:23 -05:00
J. Nick Koston
d8850758f1 Fix unifiprotect discovery running at shutdown (#88802)
* Fix unifiprotect discovery running at shutdown

Move the discovery start into `async_setup` so we only
start discovery once, regardless of how many config entries
for unifiprotect they have (or how many times they reload).

Always make discovery a background task so it cannot block
shutdown

* missing decorator
2023-02-26 22:37:22 -05:00
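
As an illustration of the pattern described in the commit message above, here is a hedged sketch (loosely following the flux_led diff further down, not the actual unifiprotect code) of starting discovery once from `async_setup` and always wrapping it in a background task so it cannot block shutdown. The helper names and the 15-minute interval are illustrative.

```python
from __future__ import annotations

from datetime import timedelta
from typing import Any

from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType

DISCOVERY_INTERVAL = timedelta(minutes=15)  # illustrative interval


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Start discovery once, no matter how many config entries exist."""

    async def _async_discovery(*_: Any) -> None:
        """Run one discovery pass (placeholder for the integration's real logic)."""

    @callback
    def _async_start_background_discovery(*_: Any) -> None:
        """Wrap discovery in a background task so it never blocks shutdown."""
        hass.async_create_background_task(_async_discovery(), "example-discovery")

    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
    )
    async_track_time_interval(
        hass, _async_start_background_discovery, DISCOVERY_INTERVAL
    )
    return True
```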
J. Nick Koston
0449856064 Bump yalexs-ble to 2.0.4 (#88798)
changelog: https://github.com/bdraco/yalexs-ble/compare/v2.0.3...v2.0.4
2023-02-26 22:37:21 -05:00
starkillerOG
e48089e0c9 Do not block on reolink firmware check fail (#88797)
Do not block on firmware check fail
2023-02-26 22:37:20 -05:00
starkillerOG
a7e081f70d Simplify reolink update unique_id (#88794)
simplify unique_id
2023-02-26 22:37:19 -05:00
Paulus Schoutsen
fe181425d8 Check circular dependencies (#88778) 2023-02-26 22:37:18 -05:00
Joakim Plate
8c7b29db25 Update nibe library to 2.0.0 (#88769) 2023-02-26 22:37:17 -05:00
J. Nick Koston
aaa5bb9f86 Fix checking if a package is installed on py3.11 (#88768)
pkg_resources is abandoned and we need to move away
from using it: https://github.com/pypa/pkg_resources

In the meantime we need to keep it working. This fixes
a new exception in py3.11 when a module is not installed,
which allows a proper fallback to pkg_resources.Requirement.parse
when needed

```
2023-02-25 15:46:21.101 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
  File "/opt/homebrew/lib/python3.11/site-packages/aiohttp/web_protocol.py", line 433, in _handle_request
    resp = await request_handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/homebrew/lib/python3.11/site-packages/aiohttp/web_app.py", line 504, in _handle
    resp = await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/homebrew/lib/python3.11/site-packages/aiohttp/web_middlewares.py", line 117, in impl
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/security_filter.py", line 60, in security_filter_middleware
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/forwarded.py", line 100, in forwarded_middleware
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/request_context.py", line 28, in request_context_middleware
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/ban.py", line 80, in ban_middleware
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/auth.py", line 235, in auth_middleware
    return await handler(request)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/view.py", line 146, in handle
    result = await result
             ^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/config/config_entries.py", line 148, in post
    return await super().post(request)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/components/http/data_validator.py", line 72, in wrapper
    result = await method(view, request, data, *args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/helpers/data_entry_flow.py", line 71, in post
    result = await self._flow_mgr.async_init(
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 826, in async_init
    flow, result = await task
                   ^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 844, in _async_init
    flow = await self.async_create_flow(handler, context=context, data=data)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 950, in async_create_flow
    await async_process_deps_reqs(self.hass, self._hass_config, integration)
  File "/Users/bdraco/home-assistant/homeassistant/setup.py", line 384, in async_process_deps_reqs
    await requirements.async_get_integration_with_requirements(
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 52, in async_get_integration_with_requirements
    return await manager.async_get_integration_with_requirements(domain)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 171, in async_get_integration_with_requirements
    await self._async_process_integration(integration, done)
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 186, in _async_process_integration
    await self.async_process_requirements(
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 252, in async_process_requirements
    await self._async_process_requirements(name, missing)
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 284, in _async_process_requirements
    installed, failures = await self.hass.async_add_executor_job(
                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/homebrew/Cellar/python@3.11/3.11.1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/concurrent/futures/thread.py", line 58, in run
    result = self.fn(*self.args, **self.kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/requirements.py", line 113, in _install_requirements_if_missing
    if pkg_util.is_installed(req) or _install_with_retry(req, kwargs):
       ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/bdraco/home-assistant/homeassistant/util/package.py", line 40, in is_installed
    pkg_resources.get_distribution(package)
  File "/opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py", line 478, in get_distribution
    dist = get_provider(dist)
           ^^^^^^^^^^^^^^^^^^
  File "/opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py", line 354, in get_provider
    return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
                                            ~~~~~~~~~~~~~~~~~~~~~~~~~^^^
IndexError: list index out of range
```
2023-02-26 22:37:17 -05:00
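
A minimal standalone sketch of the fallback described above, assuming only that `pkg_resources` is importable; this is not the actual `homeassistant.util.package.is_installed` implementation, just the shape of the guard: treat the py3.11 `IndexError` like a missing distribution and fall back to `Requirement.parse` plus a working-set lookup.

```python
import pkg_resources


def is_installed(requirement_str: str) -> bool:
    """Return True if the requirement is satisfied in the current environment."""
    try:
        pkg_resources.get_distribution(requirement_str)
        return True
    except (IndexError, pkg_resources.ResolutionError):
        # On py3.11 get_distribution() can raise IndexError for a missing
        # package; handle it like DistributionNotFound and fall back to
        # parsing the requirement and checking the working set directly.
        try:
            req = pkg_resources.Requirement.parse(requirement_str)
        except ValueError:
            return False
        return any(dist in req for dist in pkg_resources.working_set)
```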
J. Nick Koston
5b78e0c4ff Restore previous behavior of only waiting for new tasks at shutdown (#88740)
* Restore previous behavior of only waiting for new tasks at shutdown

* cleanup

* do a swap instead

* await canceled tasks

* await canceled tasks

* fix

* not needed since we no longer clear

* log it

* reword

* wait for airvisual

* tests
2023-02-26 22:37:16 -05:00
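
The bullets above describe swapping out the tracked-task set and awaiting the cancelled tasks; a generic asyncio sketch of that shape (not the actual `HomeAssistant` shutdown code) might look like this:

```python
import asyncio
from collections.abc import Coroutine
from typing import Any


class TaskTracker:
    """Track background tasks and wait only for a snapshot of them at shutdown."""

    def __init__(self) -> None:
        self._tasks: set[asyncio.Task] = set()

    def track(self, coro: Coroutine[Any, Any, Any]) -> asyncio.Task:
        """Schedule a coroutine and keep a reference until it finishes."""
        task = asyncio.ensure_future(coro)
        self._tasks.add(task)
        task.add_done_callback(self._tasks.discard)
        return task

    async def shutdown(self, timeout: float = 10.0) -> None:
        """Wait for the tasks that exist right now, cancelling stragglers."""
        # Swap instead of clearing, so tasks created after this point are
        # tracked separately and never extend this shutdown stage.
        pending, self._tasks = self._tasks, set()
        if not pending:
            return
        _, still_pending = await asyncio.wait(pending, timeout=timeout)
        for task in still_pending:
            task.cancel()
        # Await the cancelled tasks so their cancellation is fully processed.
        await asyncio.gather(*still_pending, return_exceptions=True)
```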
Franck Nijhof
2063dbf00d Bumped version to 2023.3.0b3 2023-02-25 12:07:47 +01:00
Joakim Sørensen
91a03ab83d Remove homeassistant_hardware after dependency from zha (#88751) 2023-02-25 12:07:25 +01:00
J. Nick Koston
ed8f538890 Prevent new discovery flows from being created when stopping (#88743) 2023-02-25 12:07:22 +01:00
J. Nick Koston
6196607c5d Make hass.async_stop an untracked task (#88738) 2023-02-25 12:07:19 +01:00
J. Nick Koston
833ccafb76 Log futures that are blocking shutdown stages (#88736) 2023-02-25 12:07:15 +01:00
mkmer
ca539d0a09 Add missing reauth strings to Honeywell (#88733)
Add missing reauth strings
2023-02-25 12:07:12 +01:00
Austin Mroczek
0e3e954000 Bump total_connect_client to v2023.2 (#88729)
* bump total_connect_client to v2023.2

* Trigger Build
2023-02-25 12:07:09 +01:00
avee87
4ef96c76e4 Fix log message in recorder on total_increasing reset (#88710) 2023-02-25 12:07:05 +01:00
Álvaro Fernández Rojas
d5b0c1faa0 Update aioqsw v0.3.2 (#88695)
Signed-off-by: Álvaro Fernández Rojas <noltari@gmail.com>
2023-02-25 12:07:02 +01:00
Arturo
2405908cdd Fix matter light color capabilities bit map (#88693)
* Adds matter light color capabilities bit map

* Fixed matter light hue and saturation test
2023-02-25 12:06:58 +01:00
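
The fix above keys the supported color modes off the ColorControl ColorCapabilities bitmap; a small self-contained illustration, mirroring the `ColorCapabilities` IntFlag added in the matter light diff further down:

```python
from enum import IntFlag


class ColorCapabilities(IntFlag):
    """Color control capabilities bitmap (one bit per supported color feature)."""

    kHueSaturationSupported = 0x1
    kEnhancedHueSupported = 0x2
    kColorLoopSupported = 0x4
    kXYAttributesSupported = 0x8
    kColorTemperatureSupported = 0x10


# Example raw value as a device might report it: HS + XY + color temperature.
capabilities = ColorCapabilities(0x19)
assert capabilities & ColorCapabilities.kHueSaturationSupported
assert capabilities & ColorCapabilities.kXYAttributesSupported
assert not capabilities & ColorCapabilities.kColorLoopSupported
```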
Paulus Schoutsen
b6e50135f5 Bumped version to 2023.3.0b2 2023-02-24 21:41:02 -05:00
Bram Kragten
64197aa5f5 Update frontend to 20230224.0 (#88721) 2023-02-24 21:40:56 -05:00
J. Nick Koston
5a2d7a5dd4 Reduce overhead to save json data to postgresql (#88717)
* Reduce overhead to strip nulls from json

* Reduce overhead to strip nulls from json

* small cleanup
2023-02-24 21:40:55 -05:00
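
A hedged, standalone sketch of the general idea named above (this is an assumption about the optimization, not the recorder's actual code): PostgreSQL JSONB columns reject the NUL escape `\u0000`, so only pay for a recursive stripping pass when a cheap substring check shows the serialized payload actually contains one.

```python
import json
from typing import Any


def _strip_null(obj: Any) -> Any:
    """Recursively remove NUL characters from strings in a JSON-style object."""
    if isinstance(obj, str):
        return obj.replace("\x00", "")
    if isinstance(obj, dict):
        return {key: _strip_null(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [_strip_null(value) for value in obj]
    return obj


def json_dumps_for_postgres(obj: Any) -> str:
    """Serialize obj, stripping NULs only when they are actually present."""
    # Sketch only: the recorder's real serializer and checks differ.
    serialized = json.dumps(obj)
    if "\\u0000" in serialized:
        serialized = json.dumps(_strip_null(obj))
    return serialized
```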
J. Nick Koston
2d6f84b2a8 Fix timeout in purpleapi test (#88715)
https://github.com/home-assistant/core/actions/runs/4264644494/jobs/7423099757
2023-02-24 21:40:54 -05:00
J. Nick Koston
0c6a469218 Fix migration failing when existing data has duplicates (#88712) 2023-02-24 21:40:53 -05:00
J. Nick Koston
e69271cb46 Bump aioesphomeapi to 13.4.1 (#88703)
changelog: https://github.com/esphome/aioesphomeapi/releases/tag/v13.4.1
2023-02-24 21:40:52 -05:00
Michael Hansen
02bd3f897d Make a copy of matching states so translated state names can be used (#88683) 2023-02-24 21:40:51 -05:00
J. Nick Koston
64ad5326dd Bump mopeka_iot_ble to 0.4.1 (#88680)
* Bump mopeka_iot_ble to 0.4.1

closes #88232

* adjust tests
2023-02-24 21:40:50 -05:00
puddly
74696a3fac Name the Yellow-internal radio and multi-PAN addon as ZHA serial ports (#88208)
* Expose the Yellow-internal radio and multi-PAN addon as named serial ports

* Remove the serial number if it isn't available

* Use consistent names for the addon and Zigbee radio

* Add `homeassistant_hardware` and `_yellow` as `after_dependencies`

* Handle `hassio` not existing when listing serial ports

* Add unit tests
2023-02-24 21:40:49 -05:00
Paulus Schoutsen
70e1d14da0 Bumped version to 2023.3.0b1 2023-02-23 15:00:13 -05:00
Bram Kragten
25f066d476 Update frontend to 20230223.0 (#88677) 2023-02-23 15:00:07 -05:00
Marcel van der Veldt
5adf1dcc90 Fix support for Bridge(d) and composed devices in Matter (#88662)
* Refactor discovery of entities to support composed and bridged devices

* Bump library version to 3.1.0

* move discovery schemas to platforms

* optimize a tiny bit

* simplify even more

* fixed bug in light platform

* fix color control logic

* fix some issues

* Update homeassistant/components/matter/discovery.py

Co-authored-by: Paulus Schoutsen <balloob@gmail.com>

* fix some tests

* fix light test

---------

Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
2023-02-23 15:00:05 -05:00
epenet
0fb28dcf9e Add missing async_setup_entry mock in openuv (#88661) 2023-02-23 15:00:04 -05:00
Allen Porter
2fddbcedcf Fix local calendar issue with events created with fixed UTC offsets (#88650)
Fix issue with events created with UTC offsets
2023-02-23 15:00:03 -05:00
J. Nick Koston
951df3df57 Fix untrapped exceptions during Yale Access Bluetooth first setup (#88642) 2023-02-23 15:00:02 -05:00
starkillerOG
35142e456a Bump reolink-aio to 0.5.1 and check if update supported (#88641) 2023-02-23 15:00:01 -05:00
Paulus Schoutsen
cfaba87dd6 Error checking for OTBR (#88620)
* Error checking for OTBR

* Other errors in flow too

* Tests
2023-02-23 15:00:00 -05:00
Erik Montnemery
2db8d4b73a Bump python-otbr-api to 1.0.4 (#88613)
* Bump python-otbr-api to 1.0.4

* Adjust tests
2023-02-23 14:59:59 -05:00
Raman Gupta
0d2006bf33 Add support for firmware target in zwave_js FirmwareUploadView (#88523)
* Add support for firmware target in zwave_js FirmwareUploadView

fix

* Update tests/components/zwave_js/test_api.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Update tests/components/zwave_js/test_api.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Update tests/components/zwave_js/test_api.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Update tests/components/zwave_js/test_api.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* fix types

* Switch back to using Any

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2023-02-23 14:59:58 -05:00
puddly
45547d226e Disable the ZHA bellows UART thread when connecting to a TCP coordinator (#88202)
Disable the bellows UART thread when connecting to a TCP coordinator
2023-02-23 14:59:56 -05:00
Franck Nijhof
cebc6dd096 Bumped version to 2023.3.0b0 2023-02-22 20:44:37 +01:00
94 changed files with 1605 additions and 761 deletions

View File

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.2"]
"requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.4"]
}

View File

@@ -106,6 +106,8 @@ class ActiveBluetoothDataUpdateCoordinator(
def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
"""Return true if time to try and poll."""
if self.hass.is_stopping:
return False
poll_age: float | None = None
if self._last_poll:
poll_age = monotonic_time_coarse() - self._last_poll

View File

@@ -99,6 +99,8 @@ class ActiveBluetoothProcessorCoordinator(
def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
"""Return true if time to try and poll."""
if self.hass.is_stopping:
return False
poll_age: float | None = None
if self._last_poll:
poll_age = monotonic_time_coarse() - self._last_poll

View File

@@ -66,6 +66,55 @@ SCAN_INTERVAL = datetime.timedelta(seconds=60)
# Don't support rrules more often than daily
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}
def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all datetime values have a consistent timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys that are datetime values have the same timezone."""
tzinfos = []
for key in keys:
if not (value := obj.get(key)) or not isinstance(value, datetime.datetime):
return obj
tzinfos.append(value.tzinfo)
uniq_values = groupby(tzinfos)
if len(list(uniq_values)) > 1:
raise vol.Invalid("Expected all values to have the same timezone")
return obj
return validate
def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Convert all datetime values to the local timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys that are datetime values have the same timezone."""
for k in keys:
if (value := obj.get(k)) and isinstance(value, datetime.datetime):
obj[k] = dt.as_local(value)
return obj
return validate
def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the specified values are sequential."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict are in order."""
values = []
for k in keys:
if not (value := obj.get(k)):
return obj
values.append(value)
if all(values) and values != sorted(values):
raise vol.Invalid(f"Values were not in order: {values}")
return obj
return validate
CREATE_EVENT_SERVICE = "create_event"
CREATE_EVENT_SCHEMA = vol.All(
cv.has_at_least_one_key(EVENT_START_DATE, EVENT_START_DATETIME, EVENT_IN),
@@ -98,6 +147,10 @@ CREATE_EVENT_SCHEMA = vol.All(
),
},
),
_has_consistent_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
_as_local_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
_is_sorted(EVENT_START_DATE, EVENT_END_DATE),
_is_sorted(EVENT_START_DATETIME, EVENT_END_DATETIME),
)
@@ -441,36 +494,6 @@ def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
return validate
def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all datetime values have a consistent timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys that are datetime values have the same timezone."""
values = [obj[k] for k in keys]
if all(isinstance(value, datetime.datetime) for value in values):
uniq_values = groupby(value.tzinfo for value in values)
if len(list(uniq_values)) > 1:
raise vol.Invalid(
f"Expected all values to have the same timezone: {values}"
)
return obj
return validate
def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the specified values are sequential."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict are in order."""
values = [obj[k] for k in keys]
if values != sorted(values):
raise vol.Invalid(f"Values were not in order: {values}")
return obj
return validate
@websocket_api.websocket_command(
{
vol.Required("type"): "calendar/event/create",
@@ -486,6 +509,7 @@ def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_as_local_timezone(EVENT_START, EVENT_END),
_is_sorted(EVENT_START, EVENT_END),
)
),
@@ -582,6 +606,7 @@ async def handle_calendar_event_delete(
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_as_local_timezone(EVENT_START, EVENT_END),
_is_sorted(EVENT_START, EVENT_END),
)
),

View File

@@ -227,7 +227,21 @@ class DefaultAgent(AbstractConversationAgent):
intent_response: intent.IntentResponse,
recognize_result: RecognizeResult,
) -> str:
all_states = intent_response.matched_states + intent_response.unmatched_states
# Make copies of the states here so we can add translated names for responses.
matched: list[core.State] = []
for state in intent_response.matched_states:
state_copy = core.State.from_dict(state.as_dict())
if state_copy is not None:
matched.append(state_copy)
unmatched: list[core.State] = []
for state in intent_response.unmatched_states:
state_copy = core.State.from_dict(state.as_dict())
if state_copy is not None:
unmatched.append(state_copy)
all_states = matched + unmatched
domains = {state.domain for state in all_states}
translations = await translation.async_get_translations(
self.hass, language, "state", domains
@@ -262,13 +276,11 @@ class DefaultAgent(AbstractConversationAgent):
"query": {
# Entity states that matched the query (e.g, "on")
"matched": [
template.TemplateState(self.hass, state)
for state in intent_response.matched_states
template.TemplateState(self.hass, state) for state in matched
],
# Entity states that did not match the query
"unmatched": [
template.TemplateState(self.hass, state)
for state in intent_response.unmatched_states
template.TemplateState(self.hass, state) for state in unmatched
],
},
}

View File

@@ -14,6 +14,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aioesphomeapi", "noiseprotocol"],
"requirements": ["aioesphomeapi==13.4.0", "esphome-dashboard-api==1.2.3"],
"requirements": ["aioesphomeapi==13.4.1", "esphome-dashboard-api==1.2.3"],
"zeroconf": ["_esphomelib._tcp.local."]
}

View File

@@ -87,14 +87,23 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass, STARTUP_SCAN_TIMEOUT
)
@callback
def _async_start_background_discovery(*_: Any) -> None:
"""Run discovery in the background."""
hass.async_create_background_task(_async_discovery(), "flux_led-discovery")
async def _async_discovery(*_: Any) -> None:
async_trigger_discovery(
hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
)
async_trigger_discovery(hass, domain_data[FLUX_LED_DISCOVERY])
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_discovery)
async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
)
async_track_time_interval(
hass, _async_start_background_discovery, DISCOVERY_INTERVAL
)
return True

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20230222.0"]
"requirements": ["home-assistant-frontend==20230224.0"]
}

View File

@@ -1,7 +1,6 @@
{
"domain": "hassio",
"name": "Home Assistant Supervisor",
"after_dependencies": ["panel_custom"],
"codeowners": ["@home-assistant/supervisor"],
"dependencies": ["http"],
"documentation": "https://www.home-assistant.io/integrations/hassio",

View File

@@ -7,6 +7,13 @@
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "The Honeywell integration needs to re-authenticate your account",
"data": {
"password": "[%key:common::config_flow::data::password%]"
}
}
},
"error": {

View File

@@ -15,7 +15,9 @@ from pydantic import ValidationError
import voluptuous as vol
from homeassistant.components.calendar import (
EVENT_END,
EVENT_RRULE,
EVENT_START,
CalendarEntity,
CalendarEntityFeature,
CalendarEvent,
@@ -151,6 +153,21 @@ def _parse_event(event: dict[str, Any]) -> Event:
"""Parse an ical event from a home assistant event dictionary."""
if rrule := event.get(EVENT_RRULE):
event[EVENT_RRULE] = Recur.from_rrule(rrule)
# This function is called with new events created in the local timezone,
# however ical library does not properly return recurrence_ids for
# start dates with a timezone. For now, ensure any datetime is stored as a
# floating local time to ensure we still apply proper local timezone rules.
# This can be removed when ical is updated with a new recurrence_id format
# https://github.com/home-assistant/core/issues/87759
for key in (EVENT_START, EVENT_END):
if (
(value := event[key])
and isinstance(value, datetime)
and value.tzinfo is not None
):
event[key] = dt_util.as_local(value).replace(tzinfo=None)
try:
return Event.parse_obj(event)
except ValidationError as err:
@@ -162,8 +179,12 @@ def _get_calendar_event(event: Event) -> CalendarEvent:
"""Return a CalendarEvent from an API event."""
return CalendarEvent(
summary=event.summary,
start=event.start,
end=event.end,
start=dt_util.as_local(event.start)
if isinstance(event.start, datetime)
else event.start,
end=dt_util.as_local(event.end)
if isinstance(event.end, datetime)
else event.end,
description=event.description,
uid=event.uid,
rrule=event.rrule.as_rrule_str() if event.rrule else None,

View File

@@ -33,6 +33,7 @@ from homeassistant.helpers.config_validation import ( # noqa: F401
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.service import remove_entity_service_fields
from homeassistant.helpers.typing import ConfigType, StateType
_LOGGER = logging.getLogger(__name__)
@@ -92,7 +93,7 @@ async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None:
raise ValueError(
f"Code '{code}' for locking {entity.entity_id} doesn't match pattern {entity.code_format}"
)
await entity.async_lock(**service_call.data)
await entity.async_lock(**remove_entity_service_fields(service_call))
async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -102,7 +103,7 @@ async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
raise ValueError(
f"Code '{code}' for unlocking {entity.entity_id} doesn't match pattern {entity.code_format}"
)
await entity.async_unlock(**service_call.data)
await entity.async_unlock(**remove_entity_service_fields(service_call))
async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -112,7 +113,7 @@ async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
raise ValueError(
f"Code '{code}' for opening {entity.entity_id} doesn't match pattern {entity.code_format}"
)
await entity.async_open(**service_call.data)
await entity.async_open(**remove_entity_service_fields(service_call))
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

View File

@@ -27,7 +27,7 @@ from .adapter import MatterAdapter
from .addon import get_addon_manager
from .api import async_register_api
from .const import CONF_INTEGRATION_CREATED_ADDON, CONF_USE_ADDON, DOMAIN, LOGGER
from .device_platform import DEVICE_PLATFORM
from .discovery import SUPPORTED_PLATFORMS
from .helpers import MatterEntryData, get_matter, get_node_from_device_entry
CONNECT_TIMEOUT = 10
@@ -101,12 +101,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
matter = MatterAdapter(hass, matter_client, entry)
hass.data[DOMAIN][entry.entry_id] = MatterEntryData(matter, listen_task)
await hass.config_entries.async_forward_entry_setups(entry, DEVICE_PLATFORM)
await hass.config_entries.async_forward_entry_setups(entry, SUPPORTED_PLATFORMS)
await matter.setup_nodes()
# If the listen task is already failed, we need to raise ConfigEntryNotReady
if listen_task.done() and (listen_error := listen_task.exception()) is not None:
await hass.config_entries.async_unload_platforms(entry, DEVICE_PLATFORM)
await hass.config_entries.async_unload_platforms(entry, SUPPORTED_PLATFORMS)
hass.data[DOMAIN].pop(entry.entry_id)
try:
await matter_client.disconnect()
@@ -142,7 +142,9 @@ async def _client_listen(
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, DEVICE_PLATFORM)
unload_ok = await hass.config_entries.async_unload_platforms(
entry, SUPPORTED_PLATFORMS
)
if unload_ok:
matter_entry_data: MatterEntryData = hass.data[DOMAIN].pop(entry.entry_id)

View File

@@ -3,11 +3,6 @@ from __future__ import annotations
from typing import TYPE_CHECKING, cast
from chip.clusters import Objects as all_clusters
from matter_server.client.models.node_device import (
AbstractMatterNodeDevice,
MatterBridgedNodeDevice,
)
from matter_server.common.models import EventType, ServerInfoMessage
from homeassistant.config_entries import ConfigEntry
@@ -17,12 +12,12 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN, ID_TYPE_DEVICE_ID, ID_TYPE_SERIAL, LOGGER
from .device_platform import DEVICE_PLATFORM
from .discovery import async_discover_entities
from .helpers import get_device_id
if TYPE_CHECKING:
from matter_server.client import MatterClient
from matter_server.client.models.node import MatterNode
from matter_server.client.models.node import MatterEndpoint, MatterNode
class MatterAdapter:
@@ -51,12 +46,8 @@ class MatterAdapter:
for node in await self.matter_client.get_nodes():
self._setup_node(node)
def node_added_callback(event: EventType, node: MatterNode | None) -> None:
def node_added_callback(event: EventType, node: MatterNode) -> None:
"""Handle node added event."""
if node is None:
# We can clean this up when we've improved the typing in the library.
# https://github.com/home-assistant-libs/python-matter-server/pull/153
raise RuntimeError("Node added event without node")
self._setup_node(node)
self.config_entry.async_on_unload(
@@ -67,48 +58,32 @@ class MatterAdapter:
"""Set up an node."""
LOGGER.debug("Setting up entities for node %s", node.node_id)
bridge_unique_id: str | None = None
if (
node.aggregator_device_type_instance is not None
and node.root_device_type_instance is not None
and node.root_device_type_instance.get_cluster(
all_clusters.BasicInformation
)
):
# create virtual (parent) device for bridge node device
bridge_device = MatterBridgedNodeDevice(
node.aggregator_device_type_instance
)
self._create_device_registry(bridge_device)
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
bridge_unique_id = get_device_id(server_info, bridge_device)
for node_device in node.node_devices:
self._setup_node_device(node_device, bridge_unique_id)
for endpoint in node.endpoints.values():
# Node endpoints are translated into HA devices
self._setup_endpoint(endpoint)
def _create_device_registry(
self,
node_device: AbstractMatterNodeDevice,
bridge_unique_id: str | None = None,
endpoint: MatterEndpoint,
) -> None:
"""Create a device registry entry."""
"""Create a device registry entry for a MatterNode."""
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
basic_info = node_device.device_info()
device_type_instances = node_device.device_type_instances()
basic_info = endpoint.device_info
name = basic_info.nodeLabel or basic_info.productLabel or basic_info.productName
name = basic_info.nodeLabel
if not name and isinstance(node_device, MatterBridgedNodeDevice):
# fallback name for Bridge
name = "Hub device"
elif not name and device_type_instances:
# use the productName if no node label is present
name = basic_info.productName
# handle bridged devices
bridge_device_id = None
if endpoint.is_bridged_device:
bridge_device_id = get_device_id(
server_info,
endpoint.node.endpoints[0],
)
bridge_device_id = f"{ID_TYPE_DEVICE_ID}_{bridge_device_id}"
node_device_id = get_device_id(
server_info,
node_device,
endpoint,
)
identifiers = {(DOMAIN, f"{ID_TYPE_DEVICE_ID}_{node_device_id}")}
# if available, we also add the serialnumber as identifier
@@ -124,50 +99,21 @@ class MatterAdapter:
sw_version=basic_info.softwareVersionString,
manufacturer=basic_info.vendorName,
model=basic_info.productName,
via_device=(DOMAIN, bridge_unique_id) if bridge_unique_id else None,
via_device=(DOMAIN, bridge_device_id) if bridge_device_id else None,
)
def _setup_node_device(
self, node_device: AbstractMatterNodeDevice, bridge_unique_id: str | None
) -> None:
"""Set up a node device."""
self._create_device_registry(node_device, bridge_unique_id)
def _setup_endpoint(self, endpoint: MatterEndpoint) -> None:
"""Set up a MatterEndpoint as HA Device."""
# pre-create device registry entry
self._create_device_registry(endpoint)
# run platform discovery from device type instances
for instance in node_device.device_type_instances():
created = False
for platform, devices in DEVICE_PLATFORM.items():
entity_descriptions = devices.get(instance.device_type)
if entity_descriptions is None:
continue
if not isinstance(entity_descriptions, list):
entity_descriptions = [entity_descriptions]
entities = []
for entity_description in entity_descriptions:
LOGGER.debug(
"Creating %s entity for %s (%s)",
platform,
instance.device_type.__name__,
hex(instance.device_type.device_type),
)
entities.append(
entity_description.entity_cls(
self.matter_client,
node_device,
instance,
entity_description,
)
)
self.platform_handlers[platform](entities)
created = True
if not created:
LOGGER.warning(
"Found unsupported device %s (%s)",
type(instance).__name__,
hex(instance.device_type.device_type),
)
for entity_info in async_discover_entities(endpoint):
LOGGER.debug(
"Creating %s entity for %s",
entity_info.platform,
entity_info.primary_attribute,
)
new_entity = entity_info.entity_class(
self.matter_client, endpoint, entity_info
)
self.platform_handlers[entity_info.platform]([new_entity])

View File

@@ -1,11 +1,9 @@
"""Matter binary sensors."""
from __future__ import annotations
from dataclasses import dataclass
from functools import partial
from chip.clusters import Objects as clusters
from matter_server.client.models import device_types
from chip.clusters.Objects import uint
from chip.clusters.Types import Nullable, NullValue
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -17,8 +15,9 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema
async def async_setup_entry(
@@ -34,60 +33,70 @@ async def async_setup_entry(
class MatterBinarySensor(MatterEntity, BinarySensorEntity):
"""Representation of a Matter binary sensor."""
entity_description: MatterBinarySensorEntityDescription
@callback
def _update_from_device(self) -> None:
"""Update from device."""
self._attr_is_on = self.get_matter_attribute_value(
# We always subscribe to a single value
self.entity_description.subscribe_attributes[0],
)
value: bool | uint | int | Nullable | None
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value in (None, NullValue):
value = None
elif value_convert := self._entity_info.measurement_to_ha:
value = value_convert(value)
self._attr_is_on = value
class MatterOccupancySensor(MatterBinarySensor):
"""Representation of a Matter occupancy sensor."""
_attr_device_class = BinarySensorDeviceClass.OCCUPANCY
@callback
def _update_from_device(self) -> None:
"""Update from device."""
value = self.get_matter_attribute_value(
# We always subscribe to a single value
self.entity_description.subscribe_attributes[0],
)
# Discovery schema(s) to map Matter Attributes to HA entities
DISCOVERY_SCHEMAS = [
# device specific: translate Hue motion to sensor to HA Motion sensor
# instead of generic occupancy sensor
MatterDiscoverySchema(
platform=Platform.BINARY_SENSOR,
entity_description=BinarySensorEntityDescription(
key="HueMotionSensor",
device_class=BinarySensorDeviceClass.MOTION,
name="Motion",
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
vendor_id=(4107,),
product_name=("Hue motion sensor",),
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
),
MatterDiscoverySchema(
platform=Platform.BINARY_SENSOR,
entity_description=BinarySensorEntityDescription(
key="ContactSensor",
device_class=BinarySensorDeviceClass.DOOR,
name="Contact",
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.BooleanState.Attributes.StateValue,),
# value is inverted on matter to what we expect
measurement_to_ha=lambda x: not x,
),
MatterDiscoverySchema(
platform=Platform.BINARY_SENSOR,
entity_description=BinarySensorEntityDescription(
key="OccupancySensor",
device_class=BinarySensorDeviceClass.OCCUPANCY,
name="Occupancy",
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
# The first bit = if occupied
self._attr_is_on = (value & 1 == 1) if value is not None else None
@dataclass
class MatterBinarySensorEntityDescription(
BinarySensorEntityDescription,
MatterEntityDescriptionBaseClass,
):
"""Matter Binary Sensor entity description."""
# You can't set default values on inherited data classes
MatterSensorEntityDescriptionFactory = partial(
MatterBinarySensorEntityDescription, entity_cls=MatterBinarySensor
)
DEVICE_ENTITY: dict[
type[device_types.DeviceType],
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
] = {
device_types.ContactSensor: MatterSensorEntityDescriptionFactory(
key=device_types.ContactSensor,
name="Contact",
subscribe_attributes=(clusters.BooleanState.Attributes.StateValue,),
device_class=BinarySensorDeviceClass.DOOR,
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
),
device_types.OccupancySensor: MatterSensorEntityDescriptionFactory(
key=device_types.OccupancySensor,
name="Occupancy",
entity_cls=MatterOccupancySensor,
subscribe_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
MatterDiscoverySchema(
platform=Platform.BINARY_SENSOR,
entity_description=BinarySensorEntityDescription(
key="BatteryChargeLevel",
device_class=BinarySensorDeviceClass.BATTERY,
name="Battery Status",
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.PowerSource.Attributes.BatChargeLevel,),
# only add binary battery sensor if a regular percentage based is not available
absent_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,),
measurement_to_ha=lambda x: x != clusters.PowerSource.Enums.BatChargeLevel.kOk,
),
}
]

View File

@@ -1,30 +0,0 @@
"""All mappings of Matter devices to Home Assistant platforms."""
from __future__ import annotations
from typing import TYPE_CHECKING
from homeassistant.const import Platform
from .binary_sensor import DEVICE_ENTITY as BINARY_SENSOR_DEVICE_ENTITY
from .light import DEVICE_ENTITY as LIGHT_DEVICE_ENTITY
from .sensor import DEVICE_ENTITY as SENSOR_DEVICE_ENTITY
from .switch import DEVICE_ENTITY as SWITCH_DEVICE_ENTITY
if TYPE_CHECKING:
from matter_server.client.models.device_types import DeviceType
from .entity import MatterEntityDescriptionBaseClass
DEVICE_PLATFORM: dict[
Platform,
dict[
type[DeviceType],
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
],
] = {
Platform.BINARY_SENSOR: BINARY_SENSOR_DEVICE_ENTITY,
Platform.LIGHT: LIGHT_DEVICE_ENTITY,
Platform.SENSOR: SENSOR_DEVICE_ENTITY,
Platform.SWITCH: SWITCH_DEVICE_ENTITY,
}

View File

@@ -0,0 +1,115 @@
"""Map Matter Nodes and Attributes to Home Assistant entities."""
from __future__ import annotations
from collections.abc import Generator
from chip.clusters.Objects import ClusterAttributeDescriptor
from matter_server.client.models.node import MatterEndpoint
from homeassistant.const import Platform
from homeassistant.core import callback
from .binary_sensor import DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS
from .light import DISCOVERY_SCHEMAS as LIGHT_SCHEMAS
from .models import MatterDiscoverySchema, MatterEntityInfo
from .sensor import DISCOVERY_SCHEMAS as SENSOR_SCHEMAS
from .switch import DISCOVERY_SCHEMAS as SWITCH_SCHEMAS
DISCOVERY_SCHEMAS: dict[Platform, list[MatterDiscoverySchema]] = {
Platform.BINARY_SENSOR: BINARY_SENSOR_SCHEMAS,
Platform.LIGHT: LIGHT_SCHEMAS,
Platform.SENSOR: SENSOR_SCHEMAS,
Platform.SWITCH: SWITCH_SCHEMAS,
}
SUPPORTED_PLATFORMS = tuple(DISCOVERY_SCHEMAS.keys())
@callback
def iter_schemas() -> Generator[MatterDiscoverySchema, None, None]:
"""Iterate over all available discovery schemas."""
for platform_schemas in DISCOVERY_SCHEMAS.values():
yield from platform_schemas
@callback
def async_discover_entities(
endpoint: MatterEndpoint,
) -> Generator[MatterEntityInfo, None, None]:
"""Run discovery on MatterEndpoint and return matching MatterEntityInfo(s)."""
discovered_attributes: set[type[ClusterAttributeDescriptor]] = set()
device_info = endpoint.device_info
for schema in iter_schemas():
# abort if attribute(s) already discovered
if any(x in schema.required_attributes for x in discovered_attributes):
continue
# check vendor_id
if (
schema.vendor_id is not None
and device_info.vendorID not in schema.vendor_id
):
continue
# check product_name
if (
schema.product_name is not None
and device_info.productName not in schema.product_name
):
continue
# check required device_type
if schema.device_type is not None and not any(
x in schema.device_type for x in endpoint.device_types
):
continue
# check absent device_type
if schema.not_device_type is not None and any(
x in schema.not_device_type for x in endpoint.device_types
):
continue
# check endpoint_id
if (
schema.endpoint_id is not None
and endpoint.endpoint_id not in schema.endpoint_id
):
continue
# check required attributes
if schema.required_attributes is not None and not all(
endpoint.has_attribute(None, val_schema)
for val_schema in schema.required_attributes
):
continue
# check for values that may not be present
if schema.absent_attributes is not None and any(
endpoint.has_attribute(None, val_schema)
for val_schema in schema.absent_attributes
):
continue
# all checks passed, this value belongs to an entity
attributes_to_watch = list(schema.required_attributes)
if schema.optional_attributes:
# check optional attributes
for optional_attribute in schema.optional_attributes:
if optional_attribute in attributes_to_watch:
continue
if endpoint.has_attribute(None, optional_attribute):
attributes_to_watch.append(optional_attribute)
yield MatterEntityInfo(
endpoint=endpoint,
platform=schema.platform,
attributes_to_watch=attributes_to_watch,
entity_description=schema.entity_description,
entity_class=schema.entity_class,
measurement_to_ha=schema.measurement_to_ha,
)
# prevent re-discovery of the same attributes
if not schema.allow_multi:
discovered_attributes.update(attributes_to_watch)

View File

@@ -3,90 +3,77 @@ from __future__ import annotations
from abc import abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING, Any, cast
from chip.clusters.Objects import ClusterAttributeDescriptor
from matter_server.client.models.device_type_instance import MatterDeviceTypeInstance
from matter_server.client.models.node_device import AbstractMatterNodeDevice
from matter_server.common.helpers.util import create_attribute_path
from matter_server.common.models import EventType, ServerInfoMessage
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity, EntityDescription
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import DOMAIN, ID_TYPE_DEVICE_ID
from .helpers import get_device_id, get_operational_instance_id
from .helpers import get_device_id
if TYPE_CHECKING:
from matter_server.client import MatterClient
from matter_server.client.models.node import MatterEndpoint
from .discovery import MatterEntityInfo
LOGGER = logging.getLogger(__name__)
@dataclass
class MatterEntityDescription:
"""Mixin to map a matter device to a Home Assistant entity."""
entity_cls: type[MatterEntity]
subscribe_attributes: tuple
@dataclass
class MatterEntityDescriptionBaseClass(EntityDescription, MatterEntityDescription):
"""For typing a base class that inherits from both entity descriptions."""
class MatterEntity(Entity):
"""Entity class for Matter devices."""
entity_description: MatterEntityDescriptionBaseClass
_attr_should_poll = False
_attr_has_entity_name = True
def __init__(
self,
matter_client: MatterClient,
node_device: AbstractMatterNodeDevice,
device_type_instance: MatterDeviceTypeInstance,
entity_description: MatterEntityDescriptionBaseClass,
endpoint: MatterEndpoint,
entity_info: MatterEntityInfo,
) -> None:
"""Initialize the entity."""
self.matter_client = matter_client
self._node_device = node_device
self._device_type_instance = device_type_instance
self.entity_description = entity_description
self._endpoint = endpoint
self._entity_info = entity_info
self.entity_description = entity_info.entity_description
self._unsubscribes: list[Callable] = []
# for fast lookups we create a mapping to the attribute paths
self._attributes_map: dict[type, str] = {}
# The server info is set when the client connects to the server.
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
# create unique_id based on "Operational Instance Name" and endpoint/device type
node_device_id = get_device_id(server_info, endpoint)
self._attr_unique_id = (
f"{get_operational_instance_id(server_info, self._node_device.node())}-"
f"{device_type_instance.endpoint.endpoint_id}-"
f"{device_type_instance.device_type.device_type}"
f"{node_device_id}-"
f"{endpoint.endpoint_id}-"
f"{entity_info.entity_description.key}-"
f"{entity_info.primary_attribute.cluster_id}-"
f"{entity_info.primary_attribute.attribute_id}"
)
node_device_id = get_device_id(server_info, node_device)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{ID_TYPE_DEVICE_ID}_{node_device_id}")}
)
self._attr_available = self._node_device.node().available
self._attr_available = self._endpoint.node.available
async def async_added_to_hass(self) -> None:
"""Handle being added to Home Assistant."""
await super().async_added_to_hass()
# Subscribe to attribute updates.
for attr_cls in self.entity_description.subscribe_attributes:
for attr_cls in self._entity_info.attributes_to_watch:
attr_path = self.get_matter_attribute_path(attr_cls)
self._attributes_map[attr_cls] = attr_path
self._unsubscribes.append(
self.matter_client.subscribe(
callback=self._on_matter_event,
event_filter=EventType.ATTRIBUTE_UPDATED,
node_filter=self._device_type_instance.node.node_id,
node_filter=self._endpoint.node.node_id,
attr_path_filter=attr_path,
)
)
@@ -95,7 +82,7 @@ class MatterEntity(Entity):
self.matter_client.subscribe(
callback=self._on_matter_event,
event_filter=EventType.NODE_UPDATED,
node_filter=self._device_type_instance.node.node_id,
node_filter=self._endpoint.node.node_id,
)
)
@@ -110,7 +97,7 @@ class MatterEntity(Entity):
@callback
def _on_matter_event(self, event: EventType, data: Any = None) -> None:
"""Call on update."""
self._attr_available = self._device_type_instance.node.available
self._attr_available = self._endpoint.node.available
self._update_from_device()
self.async_write_ha_state()
@@ -124,14 +111,13 @@ class MatterEntity(Entity):
self, attribute: type[ClusterAttributeDescriptor]
) -> Any:
"""Get current value for given attribute."""
return self._device_type_instance.get_attribute_value(None, attribute)
return self._endpoint.get_attribute_value(None, attribute)
@callback
def get_matter_attribute_path(
self, attribute: type[ClusterAttributeDescriptor]
) -> str:
"""Return AttributePath by providing the endpoint and Attribute class."""
endpoint = self._device_type_instance.endpoint.endpoint_id
return create_attribute_path(
endpoint, attribute.cluster_id, attribute.attribute_id
self._endpoint.endpoint_id, attribute.cluster_id, attribute.attribute_id
)

View File

@@ -11,8 +11,7 @@ from homeassistant.helpers import device_registry as dr
from .const import DOMAIN, ID_TYPE_DEVICE_ID
if TYPE_CHECKING:
from matter_server.client.models.node import MatterNode
from matter_server.client.models.node_device import AbstractMatterNodeDevice
from matter_server.client.models.node import MatterEndpoint, MatterNode
from matter_server.common.models import ServerInfoMessage
from .adapter import MatterAdapter
@@ -50,15 +49,21 @@ def get_operational_instance_id(
def get_device_id(
server_info: ServerInfoMessage,
node_device: AbstractMatterNodeDevice,
endpoint: MatterEndpoint,
) -> str:
"""Return HA device_id for the given MatterNodeDevice."""
operational_instance_id = get_operational_instance_id(
server_info, node_device.node()
)
# Append nodedevice(type) to differentiate between a root node
# and bridge within Home Assistant devices.
return f"{operational_instance_id}-{node_device.__class__.__name__}"
"""Return HA device_id for the given MatterEndpoint."""
operational_instance_id = get_operational_instance_id(server_info, endpoint.node)
# Append endpoint ID if this endpoint is a bridged or composed device
if endpoint.is_composed_device:
compose_parent = endpoint.node.get_compose_parent(endpoint.endpoint_id)
assert compose_parent is not None
postfix = str(compose_parent.endpoint_id)
elif endpoint.is_bridged_device:
postfix = str(endpoint.endpoint_id)
else:
# this should be compatible with previous versions
postfix = "MatterNodeDevice"
return f"{operational_instance_id}-{postfix}"
async def get_node_from_device_entry(
@@ -91,8 +96,8 @@ async def get_node_from_device_entry(
(
node
for node in await matter_client.get_nodes()
for node_device in node.node_devices
if get_device_id(server_info, node_device) == device_id
for endpoint in node.endpoints.values()
if get_device_id(server_info, endpoint) == device_id
),
None,
)

View File

@@ -1,9 +1,7 @@
"""Matter light."""
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from functools import partial
from enum import IntFlag
from typing import Any
from chip.clusters import Objects as clusters
@@ -24,8 +22,9 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import LOGGER
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema
from .util import (
convert_to_hass_hs,
convert_to_hass_xy,
@@ -34,32 +33,13 @@ from .util import (
renormalize,
)
class MatterColorMode(Enum):
"""Matter color mode."""
HS = 0
XY = 1
COLOR_TEMP = 2
COLOR_MODE_MAP = {
MatterColorMode.HS: ColorMode.HS,
MatterColorMode.XY: ColorMode.XY,
MatterColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP,
clusters.ColorControl.Enums.ColorMode.kCurrentHueAndCurrentSaturation: ColorMode.HS,
clusters.ColorControl.Enums.ColorMode.kCurrentXAndCurrentY: ColorMode.XY,
clusters.ColorControl.Enums.ColorMode.kColorTemperature: ColorMode.COLOR_TEMP,
}
class MatterColorControlFeatures(Enum):
"""Matter color control features."""
HS = 0 # Hue and saturation (Optional if device is color capable)
EHUE = 1 # Enhanced hue and saturation (Optional if device is color capable)
COLOR_LOOP = 2 # Color loop (Optional if device is color capable)
XY = 3 # XY (Mandatory if device is color capable)
COLOR_TEMP = 4 # Color temperature (Mandatory if device is color capable)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
@@ -73,63 +53,37 @@ async def async_setup_entry(
class MatterLight(MatterEntity, LightEntity):
"""Representation of a Matter light."""
entity_description: MatterLightEntityDescription
def _supports_feature(
self, feature_map: int, feature: MatterColorControlFeatures
) -> bool:
"""Return if device supports given feature."""
return (feature_map & (1 << feature.value)) != 0
def _supports_color_mode(self, color_feature: MatterColorControlFeatures) -> bool:
"""Return if device supports given color mode."""
feature_map = self.get_matter_attribute_value(
clusters.ColorControl.Attributes.FeatureMap,
)
assert isinstance(feature_map, int)
return self._supports_feature(feature_map, color_feature)
def _supports_hs_color(self) -> bool:
"""Return if device supports hs color."""
return self._supports_color_mode(MatterColorControlFeatures.HS)
def _supports_xy_color(self) -> bool:
"""Return if device supports xy color."""
return self._supports_color_mode(MatterColorControlFeatures.XY)
def _supports_color_temperature(self) -> bool:
"""Return if device supports color temperature."""
return self._supports_color_mode(MatterColorControlFeatures.COLOR_TEMP)
def _supports_brightness(self) -> bool:
"""Return if device supports brightness."""
entity_description: LightEntityDescription
@property
def supports_color(self) -> bool:
"""Return if the device supports color control."""
if not self._attr_supported_color_modes:
return False
return (
clusters.LevelControl.Attributes.CurrentLevel
in self.entity_description.subscribe_attributes
ColorMode.HS in self._attr_supported_color_modes
or ColorMode.XY in self._attr_supported_color_modes
)
def _supports_color(self) -> bool:
"""Return if device supports color."""
@property
def supports_color_temperature(self) -> bool:
"""Return if the device supports color temperature control."""
if not self._attr_supported_color_modes:
return False
return ColorMode.COLOR_TEMP in self._attr_supported_color_modes
return (
clusters.ColorControl.Attributes.ColorMode
in self.entity_description.subscribe_attributes
)
@property
def supports_brightness(self) -> bool:
"""Return if the device supports bridghtness control."""
if not self._attr_supported_color_modes:
return False
return ColorMode.BRIGHTNESS in self._attr_supported_color_modes
async def _set_xy_color(self, xy_color: tuple[float, float]) -> None:
"""Set xy color."""
matter_xy = convert_to_matter_xy(xy_color)
LOGGER.debug("Setting xy color to %s", matter_xy)
await self.send_device_command(
clusters.ColorControl.Commands.MoveToColor(
colorX=int(matter_xy[0]),
@@ -144,7 +98,6 @@ class MatterLight(MatterEntity, LightEntity):
matter_hs = convert_to_matter_hs(hs_color)
LOGGER.debug("Setting hs color to %s", matter_hs)
await self.send_device_command(
clusters.ColorControl.Commands.MoveToHueAndSaturation(
hue=int(matter_hs[0]),
@@ -157,7 +110,6 @@ class MatterLight(MatterEntity, LightEntity):
async def _set_color_temp(self, color_temp: int) -> None:
"""Set color temperature."""
LOGGER.debug("Setting color temperature to %s", color_temp)
await self.send_device_command(
clusters.ColorControl.Commands.MoveToColorTemperature(
colorTemperature=color_temp,
@@ -169,8 +121,7 @@ class MatterLight(MatterEntity, LightEntity):
async def _set_brightness(self, brightness: int) -> None:
"""Set brightness."""
LOGGER.debug("Setting brightness to %s", brightness)
level_control = self._device_type_instance.get_cluster(clusters.LevelControl)
level_control = self._endpoint.get_cluster(clusters.LevelControl)
assert level_control is not None
@@ -207,7 +158,7 @@ class MatterLight(MatterEntity, LightEntity):
LOGGER.debug(
"Got xy color %s for %s",
xy_color,
self._device_type_instance,
self.entity_id,
)
return xy_color
@@ -231,7 +182,7 @@ class MatterLight(MatterEntity, LightEntity):
LOGGER.debug(
"Got hs color %s for %s",
hs_color,
self._device_type_instance,
self.entity_id,
)
return hs_color
@@ -248,7 +199,7 @@ class MatterLight(MatterEntity, LightEntity):
LOGGER.debug(
"Got color temperature %s for %s",
color_temp,
self._device_type_instance,
self.entity_id,
)
return int(color_temp)
@@ -256,7 +207,7 @@ class MatterLight(MatterEntity, LightEntity):
def _get_brightness(self) -> int:
"""Get brightness from matter."""
level_control = self._device_type_instance.get_cluster(clusters.LevelControl)
level_control = self._endpoint.get_cluster(clusters.LevelControl)
# We should not get here if brightness is not supported.
assert level_control is not None
@@ -264,7 +215,7 @@ class MatterLight(MatterEntity, LightEntity):
LOGGER.debug( # type: ignore[unreachable]
"Got brightness %s for %s",
level_control.currentLevel,
self._device_type_instance,
self.entity_id,
)
return round(
@@ -284,10 +235,12 @@ class MatterLight(MatterEntity, LightEntity):
assert color_mode is not None
ha_color_mode = COLOR_MODE_MAP[MatterColorMode(color_mode)]
ha_color_mode = COLOR_MODE_MAP[color_mode]
LOGGER.debug(
"Got color mode (%s) for %s", ha_color_mode, self._device_type_instance
"Got color mode (%s) for %s",
ha_color_mode,
self.entity_id,
)
return ha_color_mode
@@ -295,8 +248,8 @@ class MatterLight(MatterEntity, LightEntity):
async def send_device_command(self, command: Any) -> None:
"""Send device command."""
await self.matter_client.send_device_command(
node_id=self._device_type_instance.node.node_id,
endpoint_id=self._device_type_instance.endpoint_id,
node_id=self._endpoint.node.node_id,
endpoint_id=self._endpoint.endpoint_id,
command=command,
)
@@ -308,15 +261,18 @@ class MatterLight(MatterEntity, LightEntity):
color_temp = kwargs.get(ATTR_COLOR_TEMP)
brightness = kwargs.get(ATTR_BRIGHTNESS)
if self._supports_color():
if hs_color is not None and self._supports_hs_color():
if self.supported_color_modes is not None:
if hs_color is not None and ColorMode.HS in self.supported_color_modes:
await self._set_hs_color(hs_color)
elif xy_color is not None and self._supports_xy_color():
elif xy_color is not None and ColorMode.XY in self.supported_color_modes:
await self._set_xy_color(xy_color)
elif color_temp is not None and self._supports_color_temperature():
elif (
color_temp is not None
and ColorMode.COLOR_TEMP in self.supported_color_modes
):
await self._set_color_temp(color_temp)
if brightness is not None and self._supports_brightness():
if brightness is not None and self.supports_brightness:
await self._set_brightness(brightness)
return
@@ -333,107 +289,81 @@ class MatterLight(MatterEntity, LightEntity):
@callback
def _update_from_device(self) -> None:
"""Update from device."""
supports_color = self._supports_color()
supports_color_temperature = (
self._supports_color_temperature() if supports_color else False
)
supports_brightness = self._supports_brightness()
if self._attr_supported_color_modes is None:
supported_color_modes = set()
if supports_color:
supported_color_modes.add(ColorMode.XY)
if self._supports_hs_color():
# work out what (color)features are supported
supported_color_modes: set[ColorMode] = set()
# brightness support
if self._entity_info.endpoint.has_attribute(
None, clusters.LevelControl.Attributes.CurrentLevel
):
supported_color_modes.add(ColorMode.BRIGHTNESS)
# colormode(s)
if self._entity_info.endpoint.has_attribute(
None, clusters.ColorControl.Attributes.ColorMode
):
capabilities = self.get_matter_attribute_value(
clusters.ColorControl.Attributes.ColorCapabilities
)
assert capabilities is not None
if capabilities & ColorCapabilities.kHueSaturationSupported:
supported_color_modes.add(ColorMode.HS)
if supports_color_temperature:
supported_color_modes.add(ColorMode.COLOR_TEMP)
if capabilities & ColorCapabilities.kXYAttributesSupported:
supported_color_modes.add(ColorMode.XY)
if supports_brightness:
supported_color_modes.add(ColorMode.BRIGHTNESS)
if capabilities & ColorCapabilities.kColorTemperatureSupported:
supported_color_modes.add(ColorMode.COLOR_TEMP)
self._attr_supported_color_modes = (
supported_color_modes if supported_color_modes else None
self._attr_supported_color_modes = supported_color_modes
LOGGER.debug(
"Supported color modes: %s for %s",
self._attr_supported_color_modes,
self.entity_id,
)
LOGGER.debug(
"Supported color modes: %s for %s",
self._attr_supported_color_modes,
self._device_type_instance,
)
# set current values
if supports_color:
if self.supports_color:
self._attr_color_mode = self._get_color_mode()
if self._attr_color_mode == ColorMode.HS:
self._attr_hs_color = self._get_hs_color()
else:
self._attr_xy_color = self._get_xy_color()
if supports_color_temperature:
if self.supports_color_temperature:
self._attr_color_temp = self._get_color_temperature()
self._attr_is_on = self.get_matter_attribute_value(
clusters.OnOff.Attributes.OnOff
)
if supports_brightness:
if self.supports_brightness:
self._attr_brightness = self._get_brightness()
@dataclass
class MatterLightEntityDescription(
LightEntityDescription,
MatterEntityDescriptionBaseClass,
):
"""Matter light entity description."""
# This enum should be removed once the ColorControlCapabilities enum is added to the CHIP (Matter) library
# clusters.ColorControl.Bitmap.ColorCapabilities
class ColorCapabilities(IntFlag):
"""Color control capabilities bitmap."""
kHueSaturationSupported = 0x1
kEnhancedHueSupported = 0x2
kColorLoopSupported = 0x4
kXYAttributesSupported = 0x8
kColorTemperatureSupported = 0x10
# You can't set default values on inherited data classes
MatterLightEntityDescriptionFactory = partial(
MatterLightEntityDescription, entity_cls=MatterLight
)
# Mapping of a Matter Device type to Light Entity Description.
# A Matter device type (instance) can consist of multiple attributes.
# For example a Color Light which has an attribute to control brightness
# but also for color.
DEVICE_ENTITY: dict[
type[device_types.DeviceType],
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
] = {
device_types.OnOffLight: MatterLightEntityDescriptionFactory(
key=device_types.OnOffLight,
subscribe_attributes=(clusters.OnOff.Attributes.OnOff,),
),
device_types.DimmableLight: MatterLightEntityDescriptionFactory(
key=device_types.DimmableLight,
subscribe_attributes=(
clusters.OnOff.Attributes.OnOff,
clusters.LevelControl.Attributes.CurrentLevel,
),
),
device_types.DimmablePlugInUnit: MatterLightEntityDescriptionFactory(
key=device_types.DimmablePlugInUnit,
subscribe_attributes=(
clusters.OnOff.Attributes.OnOff,
clusters.LevelControl.Attributes.CurrentLevel,
),
),
device_types.ColorTemperatureLight: MatterLightEntityDescriptionFactory(
key=device_types.ColorTemperatureLight,
subscribe_attributes=(
clusters.OnOff.Attributes.OnOff,
clusters.LevelControl.Attributes.CurrentLevel,
clusters.ColorControl.Attributes.ColorMode,
clusters.ColorControl.Attributes.ColorTemperatureMireds,
),
),
device_types.ExtendedColorLight: MatterLightEntityDescriptionFactory(
key=device_types.ExtendedColorLight,
subscribe_attributes=(
clusters.OnOff.Attributes.OnOff,
# Discovery schema(s) to map Matter Attributes to HA entities
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.LIGHT,
entity_description=LightEntityDescription(key="MatterLight"),
entity_class=MatterLight,
required_attributes=(clusters.OnOff.Attributes.OnOff,),
optional_attributes=(
clusters.LevelControl.Attributes.CurrentLevel,
clusters.ColorControl.Attributes.ColorMode,
clusters.ColorControl.Attributes.CurrentHue,
@@ -442,5 +372,7 @@ DEVICE_ENTITY: dict[
clusters.ColorControl.Attributes.CurrentY,
clusters.ColorControl.Attributes.ColorTemperatureMireds,
),
# restrict device type to prevent discovery in switch platform
not_device_type=(device_types.OnOffPlugInUnit,),
),
}
]
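
The light platform above now derives its supported color modes from the ColorControl ColorCapabilities bitmap instead of per-device-type feature checks, with a temporary ColorCapabilities IntFlag mirroring the Matter spec until the CHIP library ships its own enum. A minimal, self-contained sketch of that bitmask-to-color-mode translation (plain strings stand in for Home Assistant's ColorMode enum; the 0x11 value is a made-up example):

from enum import IntFlag

class ColorCapabilities(IntFlag):
    """Color control capabilities bitmap (values mirror the Matter spec)."""
    kHueSaturationSupported = 0x1
    kEnhancedHueSupported = 0x2
    kColorLoopSupported = 0x4
    kXYAttributesSupported = 0x8
    kColorTemperatureSupported = 0x10

def capabilities_to_modes(capabilities: int) -> set[str]:
    """Translate a raw capabilities bitmap into color mode names."""
    modes: set[str] = set()
    if capabilities & ColorCapabilities.kHueSaturationSupported:
        modes.add("hs")
    if capabilities & ColorCapabilities.kXYAttributesSupported:
        modes.add("xy")
    if capabilities & ColorCapabilities.kColorTemperatureSupported:
        modes.add("color_temp")
    return modes

# A bulb reporting hue/saturation plus color temperature (0x1 | 0x10 == 0x11).
assert capabilities_to_modes(0x11) == {"hs", "color_temp"}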

View File

@@ -6,5 +6,5 @@
"dependencies": ["websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/matter",
"iot_class": "local_push",
"requirements": ["python-matter-server==3.0.0"]
"requirements": ["python-matter-server==3.1.0"]
}

View File

@@ -0,0 +1,109 @@
"""Models used for the Matter integration."""
from collections.abc import Callable
from dataclasses import asdict, dataclass
from typing import Any
from chip.clusters import Objects as clusters
from chip.clusters.Objects import ClusterAttributeDescriptor
from matter_server.client.models.device_types import DeviceType
from matter_server.client.models.node import MatterEndpoint
from homeassistant.const import Platform
from homeassistant.helpers.entity import EntityDescription
class DataclassMustHaveAtLeastOne:
"""A dataclass that must have at least one input parameter that is not None."""
def __post_init__(self) -> None:
"""Post dataclass initialization."""
if all(val is None for val in asdict(self).values()):
raise ValueError("At least one input parameter must not be None")
SensorValueTypes = type[
clusters.uint | int | clusters.Nullable | clusters.float32 | float
]
@dataclass
class MatterEntityInfo:
"""Info discovered from (primary) Matter Attribute to create entity."""
# MatterEndpoint to which the value(s) belongs
endpoint: MatterEndpoint
# the home assistant platform for which an entity should be created
platform: Platform
# All attributes that need to be watched by entity (incl. primary)
attributes_to_watch: list[type[ClusterAttributeDescriptor]]
# the entity description to use
entity_description: EntityDescription
# entity class to use to instantiate the entity
entity_class: type
# [optional] function to call to convert the value from the primary attribute
measurement_to_ha: Callable[[SensorValueTypes], SensorValueTypes] | None = None
@property
def primary_attribute(self) -> type[ClusterAttributeDescriptor]:
"""Return Primary Attribute belonging to the entity."""
return self.attributes_to_watch[0]
@dataclass
class MatterDiscoverySchema:
"""Matter discovery schema.
The Matter endpoint and its (primary) Attribute for an entity must match these conditions.
"""
# specify the hass platform for which this scheme applies (e.g. light, sensor)
platform: Platform
# platform-specific entity description
entity_description: EntityDescription
# entity class to use to instantiate the entity
entity_class: type
# DISCOVERY OPTIONS
# [required] attributes that ALL need to be present
# on the node for this scheme to pass (minimal one == primary)
required_attributes: tuple[type[ClusterAttributeDescriptor], ...]
# [optional] the value's endpoint must contain this devicetype(s)
device_type: tuple[type[DeviceType] | DeviceType, ...] | None = None
# [optional] the value's endpoint must NOT contain this devicetype(s)
not_device_type: tuple[type[DeviceType] | DeviceType, ...] | None = None
# [optional] the endpoint's vendor_id must match ANY of these values
vendor_id: tuple[int, ...] | None = None
# [optional] the endpoint's product_name must match ANY of these values
product_name: tuple[str, ...] | None = None
# [optional] the attribute's endpoint_id must match ANY of these values
endpoint_id: tuple[int, ...] | None = None
# [optional] additional attributes that MAY NOT be present
# on the node for this scheme to pass
absent_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None
# [optional] additional attributes that may be present
# these attributes are copied over to attributes_to_watch and
# are not discovered by other entities
optional_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None
# [optional] bool to specify if this primary value may be discovered
# by multiple platforms
allow_multi: bool = False
# [optional] function to call to convert the value from the primary attribute
measurement_to_ha: Callable[[Any], Any] | None = None
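
MatterDiscoverySchema above is purely declarative: discovery checks the required, optional and excluded conditions against an endpoint before instantiating the entity class. A simplified, hypothetical matcher illustrating that intent (FakeEndpoint and FakeSchema are stand-ins, not the real matter_server models):

from dataclasses import dataclass, field

@dataclass
class FakeEndpoint:
    """Stand-in for MatterEndpoint: just the attributes and device types it exposes."""
    attributes: set[str]
    device_types: set[str] = field(default_factory=set)

@dataclass
class FakeSchema:
    """Stand-in for MatterDiscoverySchema, reduced to the fields used below."""
    required_attributes: tuple[str, ...]
    optional_attributes: tuple[str, ...] = ()
    not_device_type: tuple[str, ...] = ()

def match(schema: FakeSchema, endpoint: FakeEndpoint) -> list[str] | None:
    """Return the attributes to watch if the endpoint matches, else None."""
    if any(dt in endpoint.device_types for dt in schema.not_device_type):
        return None
    if not all(attr in endpoint.attributes for attr in schema.required_attributes):
        return None
    watch = list(schema.required_attributes)
    watch += [a for a in schema.optional_attributes if a in endpoint.attributes]
    return watch

# A plug must not be picked up by a light schema that excludes OnOffPlugInUnit.
light_schema = FakeSchema(required_attributes=("OnOff",), not_device_type=("OnOffPlugInUnit",))
assert match(light_schema, FakeEndpoint({"OnOff"}, {"OnOffPlugInUnit"})) is None
assert match(light_schema, FakeEndpoint({"OnOff", "CurrentLevel"})) == ["OnOff"]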

View File

@@ -1,13 +1,8 @@
"""Matter sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from functools import partial
from chip.clusters import Objects as clusters
from chip.clusters.Types import Nullable, NullValue
from matter_server.client.models import device_types
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -27,8 +22,9 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema
async def async_setup_entry(
@@ -45,94 +41,94 @@ class MatterSensor(MatterEntity, SensorEntity):
"""Representation of a Matter sensor."""
_attr_state_class = SensorStateClass.MEASUREMENT
entity_description: MatterSensorEntityDescription
@callback
def _update_from_device(self) -> None:
"""Update from device."""
measurement: Nullable | float | None
measurement = self.get_matter_attribute_value(
# We always subscribe to a single value
self.entity_description.subscribe_attributes[0],
)
if measurement == NullValue or measurement is None:
measurement = None
else:
measurement = self.entity_description.measurement_to_ha(measurement)
self._attr_native_value = measurement
value: Nullable | float | None
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
if value in (None, NullValue):
value = None
elif value_convert := self._entity_info.measurement_to_ha:
value = value_convert(value)
self._attr_native_value = value
@dataclass
class MatterSensorEntityDescriptionMixin:
"""Required fields for sensor device mapping."""
measurement_to_ha: Callable[[float], float]
@dataclass
class MatterSensorEntityDescription(
SensorEntityDescription,
MatterEntityDescriptionBaseClass,
MatterSensorEntityDescriptionMixin,
):
"""Matter Sensor entity description."""
# You can't set default values on inherited data classes
MatterSensorEntityDescriptionFactory = partial(
MatterSensorEntityDescription, entity_cls=MatterSensor
)
DEVICE_ENTITY: dict[
type[device_types.DeviceType],
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
] = {
device_types.TemperatureSensor: MatterSensorEntityDescriptionFactory(
key=device_types.TemperatureSensor,
name="Temperature",
measurement_to_ha=lambda x: x / 100,
subscribe_attributes=(
clusters.TemperatureMeasurement.Attributes.MeasuredValue,
# Discovery schema(s) to map Matter Attributes to HA entities
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="TemperatureSensor",
name="Temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
device_types.PressureSensor: MatterSensorEntityDescriptionFactory(
key=device_types.PressureSensor,
name="Pressure",
measurement_to_ha=lambda x: x / 10,
subscribe_attributes=(clusters.PressureMeasurement.Attributes.MeasuredValue,),
native_unit_of_measurement=UnitOfPressure.KPA,
device_class=SensorDeviceClass.PRESSURE,
),
device_types.FlowSensor: MatterSensorEntityDescriptionFactory(
key=device_types.FlowSensor,
name="Flow",
measurement_to_ha=lambda x: x / 10,
subscribe_attributes=(clusters.FlowMeasurement.Attributes.MeasuredValue,),
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
),
device_types.HumiditySensor: MatterSensorEntityDescriptionFactory(
key=device_types.HumiditySensor,
name="Humidity",
entity_class=MatterSensor,
required_attributes=(clusters.TemperatureMeasurement.Attributes.MeasuredValue,),
measurement_to_ha=lambda x: x / 100,
subscribe_attributes=(
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="PressureSensor",
name="Pressure",
native_unit_of_measurement=UnitOfPressure.KPA,
device_class=SensorDeviceClass.PRESSURE,
),
entity_class=MatterSensor,
required_attributes=(clusters.PressureMeasurement.Attributes.MeasuredValue,),
measurement_to_ha=lambda x: x / 10,
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="FlowSensor",
name="Flow",
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
device_class=SensorDeviceClass.WATER, # what is the device class here ?
),
entity_class=MatterSensor,
required_attributes=(clusters.FlowMeasurement.Attributes.MeasuredValue,),
measurement_to_ha=lambda x: x / 10,
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="HumiditySensor",
name="Humidity",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
),
entity_class=MatterSensor,
required_attributes=(
clusters.RelativeHumidityMeasurement.Attributes.MeasuredValue,
),
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
measurement_to_ha=lambda x: x / 100,
),
device_types.LightSensor: MatterSensorEntityDescriptionFactory(
key=device_types.LightSensor,
name="Light",
measurement_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
subscribe_attributes=(
clusters.IlluminanceMeasurement.Attributes.MeasuredValue,
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="LightSensor",
name="Illuminance",
native_unit_of_measurement=LIGHT_LUX,
device_class=SensorDeviceClass.ILLUMINANCE,
),
native_unit_of_measurement=LIGHT_LUX,
device_class=SensorDeviceClass.ILLUMINANCE,
entity_class=MatterSensor,
required_attributes=(clusters.IlluminanceMeasurement.Attributes.MeasuredValue,),
measurement_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
),
}
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=SensorEntityDescription(
key="PowerSource",
name="Battery",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
),
entity_class=MatterSensor,
required_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,),
# value has double precision
measurement_to_ha=lambda x: int(x / 2),
),
]
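
Each schema's measurement_to_ha callable converts the raw Matter attribute value to the entity's native unit: temperature arrives in hundredths of a degree, pressure and flow in tenths, illuminance on a log scale (MeasuredValue = 10000 * log10(lux) + 1) and battery percentage in half-percent steps. A quick sanity check of those conversions as used above:

def to_celsius(x: float) -> float:   # TemperatureMeasurement is in 0.01 degC
    return x / 100

def to_kpa(x: float) -> float:       # PressureMeasurement is in 0.1 kPa
    return x / 10

def to_lux(x: float) -> float:       # IlluminanceMeasurement: 10000 * log10(lux) + 1
    return round(pow(10, (x - 1) / 10000), 1)

def to_battery(x: int) -> int:       # BatPercentRemaining has 0.5 % resolution
    return int(x / 2)

assert to_celsius(2150) == 21.5
assert to_kpa(1013) == 101.3
assert to_lux(1) == 1.0              # MeasuredValue 1 corresponds to 1 lx
assert to_battery(200) == 100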

View File

@@ -1,8 +1,6 @@
"""Matter switches."""
from __future__ import annotations
from dataclasses import dataclass
from functools import partial
from typing import Any
from chip.clusters import Objects as clusters
@@ -18,8 +16,9 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema
async def async_setup_entry(
@@ -35,21 +34,19 @@ async def async_setup_entry(
class MatterSwitch(MatterEntity, SwitchEntity):
"""Representation of a Matter switch."""
entity_description: MatterSwitchEntityDescription
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn switch on."""
await self.matter_client.send_device_command(
node_id=self._device_type_instance.node.node_id,
endpoint_id=self._device_type_instance.endpoint_id,
node_id=self._endpoint.node.node_id,
endpoint_id=self._endpoint.endpoint_id,
command=clusters.OnOff.Commands.On(),
)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn switch off."""
await self.matter_client.send_device_command(
node_id=self._device_type_instance.node.node_id,
endpoint_id=self._device_type_instance.endpoint_id,
node_id=self._endpoint.node.node_id,
endpoint_id=self._endpoint.endpoint_id,
command=clusters.OnOff.Commands.Off(),
)
@@ -57,31 +54,21 @@ class MatterSwitch(MatterEntity, SwitchEntity):
def _update_from_device(self) -> None:
"""Update from device."""
self._attr_is_on = self.get_matter_attribute_value(
clusters.OnOff.Attributes.OnOff
self._entity_info.primary_attribute
)
@dataclass
class MatterSwitchEntityDescription(
SwitchEntityDescription,
MatterEntityDescriptionBaseClass,
):
"""Matter Switch entity description."""
# You can't set default values on inherited data classes
MatterSwitchEntityDescriptionFactory = partial(
MatterSwitchEntityDescription, entity_cls=MatterSwitch
)
DEVICE_ENTITY: dict[
type[device_types.DeviceType],
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
] = {
device_types.OnOffPlugInUnit: MatterSwitchEntityDescriptionFactory(
key=device_types.OnOffPlugInUnit,
subscribe_attributes=(clusters.OnOff.Attributes.OnOff,),
device_class=SwitchDeviceClass.OUTLET,
# Discovery schema(s) to map Matter Attributes to HA entities
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.SWITCH,
entity_description=SwitchEntityDescription(
key="MatterPlug", device_class=SwitchDeviceClass.OUTLET
),
entity_class=MatterSwitch,
required_attributes=(clusters.OnOff.Attributes.OnOff,),
# restrict device type to prevent discovery by light
# platform which also uses OnOff cluster
not_device_type=(device_types.OnOffLight, device_types.DimmableLight),
),
}
]

View File

@@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/mopeka",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mopeka_iot_ble==0.4.0"]
"requirements": ["mopeka_iot_ble==0.4.1"]
}

View File

@@ -8,11 +8,11 @@ from datetime import timedelta
from functools import cached_property
from typing import Any, Generic, TypeVar
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from nibe.connection import Connection
from nibe.connection.modbus import Modbus
from nibe.connection.nibegw import NibeGW, ProductInfo
from nibe.exceptions import CoilNotFoundException, CoilReadException
from nibe.exceptions import CoilNotFoundException, ReadException
from nibe.heatpump import HeatPump, Model, Series
from homeassistant.config_entries import ConfigEntry
@@ -182,7 +182,7 @@ class ContextCoordinator(
return release_update
class Coordinator(ContextCoordinator[dict[int, Coil], int]):
class Coordinator(ContextCoordinator[dict[int, CoilData], int]):
"""Update coordinator for nibe heat pumps."""
config_entry: ConfigEntry
@@ -199,17 +199,18 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
)
self.data = {}
self.seed: dict[int, Coil] = {}
self.seed: dict[int, CoilData] = {}
self.connection = connection
self.heatpump = heatpump
self.task: asyncio.Task | None = None
heatpump.subscribe(heatpump.COIL_UPDATE_EVENT, self._on_coil_update)
def _on_coil_update(self, coil: Coil):
def _on_coil_update(self, data: CoilData):
"""Handle callback on coil updates."""
self.data[coil.address] = coil
self.seed[coil.address] = coil
coil = data.coil
self.data[coil.address] = data
self.seed[coil.address] = data
self.async_update_context_listeners([coil.address])
@property
@@ -246,26 +247,26 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
async def async_write_coil(self, coil: Coil, value: int | float | str) -> None:
"""Write coil and update state."""
coil.value = value
coil = await self.connection.write_coil(coil)
data = CoilData(coil, value)
await self.connection.write_coil(data)
self.data[coil.address] = coil
self.data[coil.address] = data
self.async_update_context_listeners([coil.address])
async def async_read_coil(self, coil: Coil) -> Coil:
async def async_read_coil(self, coil: Coil) -> CoilData:
"""Read coil and update state using callbacks."""
return await self.connection.read_coil(coil)
async def _async_update_data(self) -> dict[int, Coil]:
async def _async_update_data(self) -> dict[int, CoilData]:
self.task = asyncio.current_task()
try:
return await self._async_update_data_internal()
finally:
self.task = None
async def _async_update_data_internal(self) -> dict[int, Coil]:
result: dict[int, Coil] = {}
async def _async_update_data_internal(self) -> dict[int, CoilData]:
result: dict[int, CoilData] = {}
def _get_coils() -> Iterable[Coil]:
for address in sorted(self.context_callbacks.keys()):
@@ -282,10 +283,10 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
yield coil
try:
async for coil in self.connection.read_coils(_get_coils()):
result[coil.address] = coil
self.seed.pop(coil.address, None)
except CoilReadException as exception:
async for data in self.connection.read_coils(_get_coils()):
result[data.coil.address] = data
self.seed.pop(data.coil.address, None)
except ReadException as exception:
if not result:
raise UpdateFailed(f"Failed to update: {exception}") from exception
self.logger.debug(
@@ -329,7 +330,7 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
self.coordinator.data or {}
)
def _async_read_coil(self, coil: Coil):
def _async_read_coil(self, data: CoilData):
"""Update state of entity based on coil data."""
async def _async_write_coil(self, value: int | float | str):
@@ -337,10 +338,9 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
await self.coordinator.async_write_coil(self._coil, value)
def _handle_coordinator_update(self) -> None:
coil = self.coordinator.data.get(self._coil.address)
if coil is None:
data = self.coordinator.data.get(self._coil.address)
if data is None:
return
self._coil = coil
self._async_read_coil(coil)
self._async_read_coil(data)
self.async_write_ha_state()
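
nibe 2.0 splits the coil definition (Coil) from a read value (CoilData), so the coordinator now stores CoilData keyed by coil address and entities read data.value rather than coil.value. A reduced sketch of that keyed-update flow, using stand-in dataclasses instead of the real nibe models:

from dataclasses import dataclass

@dataclass
class Coil:          # stand-in for nibe.coil.Coil (the static definition)
    address: int
    name: str

@dataclass
class CoilData:      # stand-in for nibe.coil.CoilData (definition + current value)
    coil: Coil
    value: float | str | None

data_by_address: dict[int, CoilData] = {}

def on_coil_update(data: CoilData) -> None:
    """Store the latest CoilData under its coil address, as the coordinator does."""
    data_by_address[data.coil.address] = data

on_coil_update(CoilData(Coil(40004, "outdoor-temperature"), 3.5))
assert data_by_address[40004].value == 3.5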

View File

@@ -1,7 +1,7 @@
"""The Nibe Heat Pump binary sensors."""
from __future__ import annotations
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
@@ -37,5 +37,5 @@ class BinarySensor(CoilEntity, BinarySensorEntity):
"""Initialize entity."""
super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
def _async_read_coil(self, coil: Coil) -> None:
self._attr_is_on = coil.value == "ON"
def _async_read_coil(self, data: CoilData) -> None:
self._attr_is_on = data.value == "ON"

View File

@@ -8,10 +8,10 @@ from nibe.connection.nibegw import NibeGW
from nibe.exceptions import (
AddressInUseException,
CoilNotFoundException,
CoilReadException,
CoilReadSendException,
CoilWriteException,
CoilWriteSendException,
ReadException,
ReadSendException,
WriteException,
)
from nibe.heatpump import HeatPump, Model
import voluptuous as vol
@@ -108,13 +108,13 @@ async def validate_nibegw_input(
try:
await connection.verify_connectivity()
except (CoilReadSendException, CoilWriteSendException) as exception:
except (ReadSendException, CoilWriteSendException) as exception:
raise FieldError(str(exception), CONF_IP_ADDRESS, "address") from exception
except CoilNotFoundException as exception:
raise FieldError("Coils not found", "base", "model") from exception
except CoilReadException as exception:
except ReadException as exception:
raise FieldError("Timeout on read from pump", "base", "read") from exception
except CoilWriteException as exception:
except WriteException as exception:
raise FieldError("Timeout on writing to pump", "base", "write") from exception
finally:
await connection.stop()
@@ -147,13 +147,13 @@ async def validate_modbus_input(
try:
await connection.verify_connectivity()
except (CoilReadSendException, CoilWriteSendException) as exception:
except (ReadSendException, CoilWriteSendException) as exception:
raise FieldError(str(exception), CONF_MODBUS_URL, "address") from exception
except CoilNotFoundException as exception:
raise FieldError("Coils not found", "base", "model") from exception
except CoilReadException as exception:
except ReadException as exception:
raise FieldError("Timeout on read from pump", "base", "read") from exception
except CoilWriteException as exception:
except WriteException as exception:
raise FieldError("Timeout on writing to pump", "base", "write") from exception
finally:
await connection.stop()

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"iot_class": "local_polling",
"requirements": ["nibe==1.6.0"]
"requirements": ["nibe==2.0.0"]
}

View File

@@ -1,7 +1,7 @@
"""The Nibe Heat Pump numbers."""
from __future__ import annotations
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from homeassistant.components.number import ENTITY_ID_FORMAT, NumberEntity
from homeassistant.config_entries import ConfigEntry
@@ -58,13 +58,13 @@ class Number(CoilEntity, NumberEntity):
self._attr_native_unit_of_measurement = coil.unit
self._attr_native_value = None
def _async_read_coil(self, coil: Coil) -> None:
if coil.value is None:
def _async_read_coil(self, data: CoilData) -> None:
if data.value is None:
self._attr_native_value = None
return
try:
self._attr_native_value = float(coil.value)
self._attr_native_value = float(data.value)
except ValueError:
self._attr_native_value = None

View File

@@ -1,7 +1,7 @@
"""The Nibe Heat Pump select."""
from __future__ import annotations
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from homeassistant.components.select import ENTITY_ID_FORMAT, SelectEntity
from homeassistant.config_entries import ConfigEntry
@@ -40,12 +40,12 @@ class Select(CoilEntity, SelectEntity):
self._attr_options = list(coil.mappings.values())
self._attr_current_option = None
def _async_read_coil(self, coil: Coil) -> None:
if not isinstance(coil.value, str):
def _async_read_coil(self, data: CoilData) -> None:
if not isinstance(data.value, str):
self._attr_current_option = None
return
self._attr_current_option = coil.value
self._attr_current_option = data.value
async def async_select_option(self, option: str) -> None:
"""Support writing value."""

View File

@@ -1,7 +1,7 @@
"""The Nibe Heat Pump sensors."""
from __future__ import annotations
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from homeassistant.components.sensor import (
ENTITY_ID_FORMAT,
@@ -146,5 +146,5 @@ class Sensor(CoilEntity, SensorEntity):
self._attr_native_unit_of_measurement = coil.unit
self._attr_entity_category = EntityCategory.DIAGNOSTIC
def _async_read_coil(self, coil: Coil):
self._attr_native_value = coil.value
def _async_read_coil(self, data: CoilData):
self._attr_native_value = data.value

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
from typing import Any
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity
from homeassistant.config_entries import ConfigEntry
@@ -40,8 +40,8 @@ class Switch(CoilEntity, SwitchEntity):
super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
self._attr_is_on = None
def _async_read_coil(self, coil: Coil) -> None:
self._attr_is_on = coil.value == "ON"
def _async_read_coil(self, data: CoilData) -> None:
self._attr_is_on = data.value == "ON"
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""

View File

@@ -1,11 +1,13 @@
"""The Open Thread Border Router integration."""
from __future__ import annotations
import asyncio
from collections.abc import Callable, Coroutine
import dataclasses
from functools import wraps
from typing import Any, Concatenate, ParamSpec, TypeVar
import aiohttp
import python_otbr_api
from homeassistant.components.thread import async_add_dataset
@@ -63,8 +65,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
otbrdata = OTBRData(entry.data["url"], api)
try:
dataset = await otbrdata.get_active_dataset_tlvs()
except HomeAssistantError as err:
raise ConfigEntryNotReady from err
except (
HomeAssistantError,
aiohttp.ClientError,
asyncio.TimeoutError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
if dataset:
await async_add_dataset(hass, entry.title, dataset.hex())
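
Transport-level failures (aiohttp client errors and timeouts) from the OTBR API now raise ConfigEntryNotReady, so Home Assistant retries the entry later instead of failing setup with an unhandled error. The same wrapping pattern in isolation (fetch_active_dataset is a placeholder coroutine and ConfigEntryNotReady is stubbed to keep the sketch self-contained):

import asyncio
from collections.abc import Awaitable, Callable

import aiohttp

class ConfigEntryNotReady(Exception):
    """Stub of homeassistant.exceptions.ConfigEntryNotReady for this sketch."""

async def setup_entry(fetch_active_dataset: Callable[[], Awaitable[bytes]]) -> bytes:
    """Turn any connection problem into a retryable ConfigEntryNotReady."""
    try:
        return await fetch_active_dataset()
    except (aiohttp.ClientError, asyncio.TimeoutError) as err:
        raise ConfigEntryNotReady("Unable to connect") from err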

View File

@@ -1,8 +1,10 @@
"""Config flow for the Open Thread Border Router integration."""
from __future__ import annotations
import asyncio
import logging
import aiohttp
import python_otbr_api
import voluptuous as vol
@@ -48,7 +50,11 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN):
url = user_input[CONF_URL]
try:
await self._connect_and_create_dataset(url)
except python_otbr_api.OTBRError:
except (
python_otbr_api.OTBRError,
aiohttp.ClientError,
asyncio.TimeoutError,
):
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(DOMAIN)

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/otbr",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["python-otbr-api==1.0.3"]
"requirements": ["python-otbr-api==1.0.4"]
}

View File

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
"iot_class": "local_polling",
"loggers": ["aioqsw"],
"requirements": ["aioqsw==0.3.1"]
"requirements": ["aioqsw==0.3.2"]
}

View File

@@ -13,6 +13,7 @@ from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
from sqlalchemy.engine import CursorResult, Engine
from sqlalchemy.exc import (
DatabaseError,
IntegrityError,
InternalError,
OperationalError,
ProgrammingError,
@@ -778,9 +779,10 @@ def _apply_update( # noqa: C901
# Add name column to StatisticsMeta
_add_columns(session_maker, "statistics_meta", ["name VARCHAR(255)"])
elif new_version == 24:
_LOGGER.debug("Deleting duplicated statistics entries")
with session_scope(session=session_maker()) as session:
delete_statistics_duplicates(hass, session)
# This used to create the unique indices for start and statistic_id
# but we changed the format in schema 34 which will now take care
# of removing any duplicate if they still exist.
pass
elif new_version == 25:
_add_columns(session_maker, "states", [f"attributes_id {big_int}"])
_create_index(session_maker, "states", "ix_states_attributes_id")
@@ -907,7 +909,26 @@ def _apply_update( # noqa: C901
"statistics_short_term",
"ix_statistics_short_term_statistic_id_start_ts",
)
_migrate_statistics_columns_to_timestamp(session_maker, engine)
try:
_migrate_statistics_columns_to_timestamp(session_maker, engine)
except IntegrityError as ex:
_LOGGER.error(
"Statistics table contains duplicate entries: %s; "
"Cleaning up duplicates and trying again; "
"This will take a while; "
"Please be patient!",
ex,
)
# There may be duplicated statistics entries, delete duplicates
# and try again
with session_scope(session=session_maker()) as session:
delete_statistics_duplicates(hass, session)
_migrate_statistics_columns_to_timestamp(session_maker, engine)
# Log at error level to ensure the user sees this message in the log
# since we logged the error above.
_LOGGER.error(
"Statistics migration successfully recovered after statistics table duplicate cleanup"
)
elif new_version == 35:
# Migration is done in two steps to ensure we can start using
# the new columns before we wipe the old ones.
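
The schema 34 timestamp migration can trip over duplicate statistics rows that older installs still carry (the schema 24 cleanup above is now a no-op), which surfaces as an IntegrityError; the fix deletes the duplicates and retries the migration once. The same recover-and-retry control flow, reduced to placeholders:

import logging
from collections.abc import Callable

_LOGGER = logging.getLogger(__name__)

class IntegrityError(Exception):
    """Stand-in for sqlalchemy.exc.IntegrityError in this sketch."""

def migrate_with_cleanup(
    migrate: Callable[[], None], cleanup_duplicates: Callable[[], None]
) -> None:
    """Run the migration; on an integrity failure, clean up duplicates and retry once."""
    try:
        migrate()
    except IntegrityError as ex:
        _LOGGER.error("Duplicate statistics entries found (%s); cleaning up and retrying", ex)
        cleanup_duplicates()
        migrate()
        _LOGGER.error("Statistics migration recovered after duplicate cleanup")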

View File

@@ -125,7 +125,7 @@ def session_scope(
need_rollback = True
session.commit()
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error executing query: %s", err)
_LOGGER.error("Error executing query: %s", err, exc_info=True)
if need_rollback:
session.rollback()
if not exception_filter or not exception_filter(err):

View File

@@ -79,6 +79,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
async def async_check_firmware_update():
"""Check for firmware updates."""
if not host.api.supported(None, "update"):
return False
async with async_timeout.timeout(host.api.timeout):
try:
return await host.api.check_new_firmware()
@@ -103,9 +106,10 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
)
# Fetch initial data so we have data when entities subscribe
try:
# If camera WAN blocked, firmware check fails, do not prevent setup
await asyncio.gather(
device_coordinator.async_config_entry_first_refresh(),
firmware_coordinator.async_config_entry_first_refresh(),
firmware_coordinator.async_refresh(),
)
except ConfigEntryNotReady:
await host.stop()

View File

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.5.0"]
"requirements": ["reolink-aio==0.5.1"]
}

View File

@@ -30,7 +30,8 @@ async def async_setup_entry(
) -> None:
"""Set up update entities for Reolink component."""
reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([ReolinkUpdateEntity(reolink_data)])
if reolink_data.host.api.supported(None, "update"):
async_add_entities([ReolinkUpdateEntity(reolink_data)])
class ReolinkUpdateEntity(ReolinkBaseCoordinatorEntity, UpdateEntity):
@@ -48,7 +49,7 @@ class ReolinkUpdateEntity(ReolinkBaseCoordinatorEntity, UpdateEntity):
"""Initialize a Netgear device."""
super().__init__(reolink_data, reolink_data.firmware_coordinator)
self._attr_unique_id = f"{self._host.unique_id}_update"
self._attr_unique_id = f"{self._host.unique_id}"
@property
def installed_version(self) -> str | None:

View File

@@ -588,8 +588,8 @@ def _compile_statistics( # noqa: C901
),
entity_id,
new_state,
state.last_updated.isoformat(),
fstate,
state.last_updated.isoformat(),
)
except HomeAssistantError:
continue

View File

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/thread",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["python-otbr-api==1.0.3", "pyroute2==0.7.5"],
"requirements": ["python-otbr-api==1.0.4", "pyroute2==0.7.5"],
"zeroconf": ["_meshcop._udp.local."]
}

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/totalconnect",
"iot_class": "cloud_polling",
"loggers": ["total_connect_client"],
"requirements": ["total_connect_client==2023.1"]
"requirements": ["total_connect_client==2023.2"]
}

View File

@@ -14,6 +14,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers.issue_registry import IssueSeverity
from homeassistant.helpers.typing import ConfigType
from .const import (
CONF_ALLOW_EA,
@@ -40,10 +41,15 @@ _LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=DEFAULT_SCAN_INTERVAL)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the UniFi Protect."""
# Only start discovery once regardless of how many entries they have
async_start_discovery(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up the UniFi Protect config entries."""
async_start_discovery(hass)
protect = async_create_api_client(hass, entry)
_LOGGER.debug("Connect to UniFi Protect")
data_service = ProtectData(hass, protect, SCAN_INTERVAL, entry)

View File

@@ -29,13 +29,19 @@ def async_start_discovery(hass: HomeAssistant) -> None:
return
domain_data[DISCOVERY] = True
async def _async_discovery(*_: Any) -> None:
async def _async_discovery() -> None:
async_trigger_discovery(hass, await async_discover_devices())
# Do not block startup since discovery takes 31s or more
hass.async_create_background_task(_async_discovery(), "unifiprotect-discovery")
@callback
def _async_start_background_discovery(*_: Any) -> None:
"""Run discovery in the background."""
hass.async_create_background_task(_async_discovery(), "unifiprotect-discovery")
async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
# Do not block startup since discovery takes 31s or more
_async_start_background_discovery()
async_track_time_interval(
hass, _async_start_background_discovery, DISCOVERY_INTERVAL
)
async def async_discover_devices() -> list[UnifiDevice]:
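
Discovery is now always scheduled through hass.async_create_background_task, both once at startup and from the interval timer, so the roughly 31 second scan can never hold up startup or shutdown. A plain-asyncio sketch of that fire-and-forget wrapper (run_discovery is a placeholder; asyncio.create_task plus a strong reference stands in for the background-task helper):

import asyncio

background_tasks: set[asyncio.Task] = set()

async def run_discovery() -> None:
    """Placeholder for the real discovery coroutine, which can take ~31 s."""
    await asyncio.sleep(0)

def start_background_discovery(*_: object) -> None:
    """Fire-and-forget wrapper, mirroring the @callback added in the diff."""
    task = asyncio.create_task(run_discovery())
    background_tasks.add(task)                     # keep a strong reference
    task.add_done_callback(background_tasks.discard)

async def main() -> None:
    start_background_discovery()                   # once at startup ...
    # ... and again on every interval tick in the real integration.
    await asyncio.gather(*background_tasks)

asyncio.run(main())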

View File

@@ -1,6 +1,8 @@
"""The Yale Access Bluetooth integration."""
from __future__ import annotations
import asyncio
from yalexs_ble import (
AuthError,
ConnectionInfo,
@@ -62,7 +64,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await push_lock.wait_for_first_update(DEVICE_TIMEOUT)
except AuthError as ex:
raise ConfigEntryAuthFailed(str(ex)) from ex
except YaleXSBLEError as ex:
except (YaleXSBLEError, asyncio.TimeoutError) as ex:
raise ConfigEntryNotReady(
f"{ex}; Try moving the Bluetooth adapter closer to {local_name}"
) from ex

View File

@@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
"iot_class": "local_push",
"requirements": ["yalexs-ble==2.0.2"]
"requirements": ["yalexs-ble==2.0.4"]
}

View File

@@ -7,6 +7,7 @@ import json
from typing import Any
import serial.tools.list_ports
from serial.tools.list_ports_common import ListPortInfo
import voluptuous as vol
import zigpy.backups
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
@@ -14,9 +15,13 @@ from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant import config_entries
from homeassistant.components import onboarding, usb, zeroconf
from homeassistant.components.file_upload import process_uploaded_file
from homeassistant.components.hassio import AddonError, AddonState
from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon
from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowHandler, FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
from homeassistant.util import dt
@@ -72,6 +77,41 @@ def _format_backup_choice(
return f"{dt.as_local(backup.backup_time).strftime('%c')} ({identifier})"
async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
"""List all serial ports, including the Yellow radio and the multi-PAN addon."""
ports = await hass.async_add_executor_job(serial.tools.list_ports.comports)
# Add useful info to the Yellow's serial port selection screen
try:
yellow_hardware.async_info(hass)
except HomeAssistantError:
pass
else:
yellow_radio = next(p for p in ports if p.device == "/dev/ttyAMA1")
yellow_radio.description = "Yellow Zigbee module"
yellow_radio.manufacturer = "Nabu Casa"
# Present the multi-PAN addon as a setup option, if it's available
addon_manager = silabs_multiprotocol_addon.get_addon_manager(hass)
try:
addon_info = await addon_manager.async_get_addon_info()
except (AddonError, KeyError):
addon_info = None
if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED:
addon_port = ListPortInfo(
device=silabs_multiprotocol_addon.get_zigbee_socket(hass, addon_info),
skip_link_detection=True,
)
addon_port.description = "Multiprotocol add-on"
addon_port.manufacturer = "Nabu Casa"
ports.append(addon_port)
return ports
class BaseZhaFlow(FlowHandler):
"""Mixin for common ZHA flow steps and forms."""
@@ -120,9 +160,9 @@ class BaseZhaFlow(FlowHandler):
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Choose a serial port."""
ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
ports = await list_serial_ports(self.hass)
list_of_ports = [
f"{p}, s/n: {p.serial_number or 'n/a'}"
f"{p}{', s/n: ' + p.serial_number if p.serial_number else ''}"
+ (f" - {p.manufacturer}" if p.manufacturer else "")
for p in ports
]
@@ -146,7 +186,7 @@ class BaseZhaFlow(FlowHandler):
return await self.async_step_manual_pick_radio_type()
self._title = (
f"{port.description}, s/n: {port.serial_number or 'n/a'}"
f"{port.description}{', s/n: ' + port.serial_number if port.serial_number else ''}"
f" - {port.manufacturer}"
if port.manufacturer
else ""

View File

@@ -139,6 +139,7 @@ CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_USE_THREAD = "use_thread"
CONF_ZIGPY = "zigpy_config"
CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains"

View File

@@ -40,7 +40,9 @@ from .const import (
ATTR_SIGNATURE,
ATTR_TYPE,
CONF_DATABASE,
CONF_DEVICE_PATH,
CONF_RADIO_TYPE,
CONF_USE_THREAD,
CONF_ZIGPY,
DATA_ZHA,
DATA_ZHA_BRIDGE_ID,
@@ -167,6 +169,15 @@ class ZHAGateway:
app_config[CONF_DATABASE] = database
app_config[CONF_DEVICE] = self.config_entry.data[CONF_DEVICE]
# The bellows UART thread sometimes propagates a cancellation into the main Core
# event loop, when a connection to a TCP coordinator fails in a specific way
if (
CONF_USE_THREAD not in app_config
and RadioType[radio_type] is RadioType.ezsp
and app_config[CONF_DEVICE][CONF_DEVICE_PATH].startswith("socket://")
):
app_config[CONF_USE_THREAD] = False
app_config = app_controller_cls.SCHEMA(app_config)
for attempt in range(STARTUP_RETRIES):

View File

@@ -1,7 +1,7 @@
{
"domain": "zha",
"name": "Zigbee Home Automation",
"after_dependencies": ["onboarding", "usb", "zeroconf"],
"after_dependencies": ["onboarding", "usb"],
"codeowners": ["@dmulcahey", "@adminiuga", "@puddly"],
"config_flow": true,
"dependencies": ["file_upload"],

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
import dataclasses
from functools import partial, wraps
from typing import Any, Literal
from typing import Any, Literal, cast
from aiohttp import web, web_exceptions, web_request
import voluptuous as vol
@@ -2186,6 +2186,9 @@ class FirmwareUploadView(HomeAssistantView):
additional_user_agent_components=USER_AGENT,
)
else:
firmware_target: int | None = None
if "target" in data:
firmware_target = int(cast(str, data["target"]))
await update_firmware(
node.client.ws_server_url,
node,
@@ -2193,6 +2196,7 @@ class FirmwareUploadView(HomeAssistantView):
NodeFirmwareUpdateData(
uploaded_file.filename,
await hass.async_add_executor_job(uploaded_file.file.read),
firmware_target=firmware_target,
)
],
async_get_clientsession(hass),

View File

@@ -445,6 +445,10 @@ class ConfigEntry:
async def setup_again(*_: Any) -> None:
"""Run setup again."""
# Check again when we fire in case shutdown
# has started so we do not block shutdown
if hass.is_stopping:
return
self._async_cancel_retry_setup = None
await self.async_setup(hass, integration=integration, tries=tries)

View File

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 3
PATCH_VERSION: Final = "0.dev0"
PATCH_VERSION: Final = "0b4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

View File

@@ -38,6 +38,7 @@ from typing import (
)
from urllib.parse import urlparse
import async_timeout
from typing_extensions import Self
import voluptuous as vol
import yarl
@@ -711,6 +712,14 @@ class HomeAssistant:
"Stopping Home Assistant before startup has completed may fail"
)
# Keep holding the reference to the tasks but do not allow them
# to block shutdown. Only tasks created after this point will
# be waited for.
running_tasks = self._tasks
# Avoid clearing here since we want the remove callbacks to fire
# and remove the tasks from the original set which is now running_tasks
self._tasks = set()
# Cancel all background tasks
for task in self._background_tasks:
self._tasks.add(task)
@@ -730,6 +739,7 @@ class HomeAssistant:
"Timed out waiting for shutdown stage 1 to complete, the shutdown will"
" continue"
)
self._async_log_running_tasks(1)
# stage 2
self.state = CoreState.final_write
@@ -742,11 +752,41 @@ class HomeAssistant:
"Timed out waiting for shutdown stage 2 to complete, the shutdown will"
" continue"
)
self._async_log_running_tasks(2)
# stage 3
self.state = CoreState.not_running
self.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
# Make a copy of running_tasks since a task can finish
# while we are awaiting canceled tasks to get their result
# which will result in the set size changing during iteration
for task in list(running_tasks):
if task.done():
# Since we made a copy we need to check
# to see if the task finished while we
# were awaiting another task
continue
_LOGGER.warning(
"Task %s was still running after stage 2 shutdown; "
"Integrations should cancel non-critical tasks when receiving "
"the stop event to prevent delaying shutdown",
task,
)
task.cancel()
try:
async with async_timeout.timeout(0.1):
await task
except asyncio.CancelledError:
pass
except asyncio.TimeoutError:
# Task may be shielded from cancellation.
_LOGGER.exception(
"Task %s could not be canceled during stage 3 shutdown", task
)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception("Task %s error during stage 3 shutdown: %s", task, ex)
# Prevent run_callback_threadsafe from scheduling any additional
# callbacks in the event loop as callbacks created on the futures
# it returns will never run after the final `self.async_block_till_done`
@@ -762,11 +802,18 @@ class HomeAssistant:
"Timed out waiting for shutdown stage 3 to complete, the shutdown will"
" continue"
)
self._async_log_running_tasks(3)
self.state = CoreState.stopped
if self._stopped is not None:
self._stopped.set()
def _async_log_running_tasks(self, stage: int) -> None:
"""Log all running tasks."""
for task in self._tasks:
_LOGGER.warning("Shutdown stage %s: still running: %s", stage, task)
class Context:
"""The context that triggered something."""

View File

@@ -44,7 +44,9 @@ def _async_init_flow(
# as ones in progress as it may cause additional device probing
# which can overload devices since zeroconf/ssdp updates can happen
# multiple times in the same minute
if hass.config_entries.flow.async_has_matching_flow(domain, context, data):
if hass.is_stopping or hass.config_entries.flow.async_has_matching_flow(
domain, context, data
):
return None
return hass.config_entries.flow.async_init(domain, context=context, data=data)

View File

@@ -83,38 +83,28 @@ def json_bytes(data: Any) -> bytes:
)
def _strip_null(obj: Any) -> Any:
"""Strip NUL from an object."""
if isinstance(obj, str):
return obj.split("\0", 1)[0]
if isinstance(obj, dict):
return {key: _strip_null(o) for key, o in obj.items()}
if isinstance(obj, list):
return [_strip_null(o) for o in obj]
return obj
def json_bytes_strip_null(data: Any) -> bytes:
"""Dump json bytes after terminating strings at the first NUL."""
def process_dict(_dict: dict[Any, Any]) -> dict[Any, Any]:
"""Strip NUL from items in a dict."""
return {key: strip_null(o) for key, o in _dict.items()}
def process_list(_list: list[Any]) -> list[Any]:
"""Strip NUL from items in a list."""
return [strip_null(o) for o in _list]
def strip_null(obj: Any) -> Any:
"""Strip NUL from an object."""
if isinstance(obj, str):
return obj.split("\0", 1)[0]
if isinstance(obj, dict):
return process_dict(obj)
if isinstance(obj, list):
return process_list(obj)
return obj
# We expect null-characters to be very rare, hence try encoding first and look
# for an escaped null-character in the output.
result = json_bytes(data)
if b"\\u0000" in result:
# We work on the processed result so we don't need to worry about
# Home Assistant extensions which allows encoding sets, tuples, etc.
data_processed = orjson.loads(result)
data_processed = strip_null(data_processed)
result = json_bytes(data_processed)
if b"\\u0000" not in result:
return result
return result
# We work on the processed result so we don't need to worry about
# Home Assistant extensions which allows encoding sets, tuples, etc.
return json_bytes(_strip_null(orjson.loads(result)))
def json_dumps(data: Any) -> str:
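
json_bytes_strip_null is refactored so the common case stays a single encode: only when the output contains an escaped NUL does it decode, strip and re-encode, with the recursive helper hoisted to module level. A standalone version of that encode-first strategy using plain orjson (json_bytes in Home Assistant adds its own encoder options, which this sketch omits):

from typing import Any

import orjson

def _strip_null(obj: Any) -> Any:
    """Terminate strings at the first NUL, recursing into dicts and lists."""
    if isinstance(obj, str):
        return obj.split("\0", 1)[0]
    if isinstance(obj, dict):
        return {key: _strip_null(o) for key, o in obj.items()}
    if isinstance(obj, list):
        return [_strip_null(o) for o in obj]
    return obj

def json_bytes_strip_null(data: Any) -> bytes:
    """Encode first; only re-encode if an escaped NUL shows up in the output."""
    result = orjson.dumps(data)
    if b"\\u0000" in result:
        result = orjson.dumps(_strip_null(orjson.loads(result)))
    return result

assert json_bytes_strip_null({"name": "ok"}) == b'{"name":"ok"}'
assert json_bytes_strip_null({"name": "bad\0trailing"}) == b'{"name":"bad"}'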

View File

@@ -513,6 +513,16 @@ async def async_get_all_descriptions(
return descriptions
@callback
def remove_entity_service_fields(call: ServiceCall) -> dict[Any, Any]:
"""Remove entity service fields."""
return {
key: val
for key, val in call.data.items()
if key not in cv.ENTITY_SERVICE_FIELDS
}
@callback
@bind_hass
def async_set_service_schema(
@@ -567,11 +577,7 @@ async def entity_service_call( # noqa: C901
# If the service function is a string, we'll pass it the service call data
if isinstance(func, str):
data: dict | ServiceCall = {
key: val
for key, val in call.data.items()
if key not in cv.ENTITY_SERVICE_FIELDS
}
data: dict | ServiceCall = remove_entity_service_fields(call)
# If the service function is not a string, we pass the service call
else:
data = call

View File

@@ -1,4 +1,5 @@
"""Signal handling related helpers."""
import asyncio
import logging
import signal
@@ -23,7 +24,9 @@ def async_register_signal_handling(hass: HomeAssistant) -> None:
"""
hass.loop.remove_signal_handler(signal.SIGTERM)
hass.loop.remove_signal_handler(signal.SIGINT)
hass.async_create_task(hass.async_stop(exit_code))
hass.data["homeassistant_stop"] = asyncio.create_task(
hass.async_stop(exit_code)
)
try:
hass.loop.add_signal_handler(signal.SIGTERM, async_signal_handle, 0)

View File

@@ -23,7 +23,7 @@ fnvhash==0.1.0
hass-nabucasa==0.61.0
hassil==1.0.5
home-assistant-bluetooth==1.9.3
home-assistant-frontend==20230222.0
home-assistant-frontend==20230224.0
home-assistant-intents==2023.2.22
httpx==0.23.3
ifaddr==0.1.7

View File

@@ -39,7 +39,7 @@ def is_installed(package: str) -> bool:
try:
pkg_resources.get_distribution(package)
return True
except (pkg_resources.ResolutionError, pkg_resources.ExtractionError):
except (IndexError, pkg_resources.ResolutionError, pkg_resources.ExtractionError):
req = pkg_resources.Requirement.parse(package)
except ValueError:
# This is a zip file. We no longer use this in Home Assistant,

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.3.0.dev0"
version = "2023.3.0b4"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"

View File

@@ -156,7 +156,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.4.0
aioesphomeapi==13.4.1
# homeassistant.components.flo
aioflo==2021.11.0
@@ -249,7 +249,7 @@ aiopvpc==4.0.1
aiopyarr==22.11.0
# homeassistant.components.qnap_qsw
aioqsw==0.3.1
aioqsw==0.3.2
# homeassistant.components.recollect_waste
aiorecollect==1.0.8
@@ -907,7 +907,7 @@ hole==0.8.0
holidays==0.18.0
# homeassistant.components.frontend
home-assistant-frontend==20230222.0
home-assistant-frontend==20230224.0
# homeassistant.components.conversation
home-assistant-intents==2023.2.22
@@ -1144,7 +1144,7 @@ moat-ble==0.1.1
moehlenhoff-alpha2==1.3.0
# homeassistant.components.mopeka
mopeka_iot_ble==0.4.0
mopeka_iot_ble==0.4.1
# homeassistant.components.motion_blinds
motionblinds==0.6.17
@@ -1201,7 +1201,7 @@ nextcord==2.0.0a8
nextdns==1.3.0
# homeassistant.components.nibe_heatpump
nibe==1.6.0
nibe==2.0.0
# homeassistant.components.niko_home_control
niko-home-control==0.2.1
@@ -2081,7 +2081,7 @@ python-kasa==0.5.1
# python-lirc==1.2.3
# homeassistant.components.matter
python-matter-server==3.0.0
python-matter-server==3.1.0
# homeassistant.components.xiaomi_miio
python-miio==0.5.12
@@ -2097,7 +2097,7 @@ python-nest==4.2.0
# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.3
python-otbr-api==1.0.4
# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -2237,7 +2237,7 @@ regenmaschine==2022.11.0
renault-api==0.1.12
# homeassistant.components.reolink
reolink-aio==0.5.0
reolink-aio==0.5.1
# homeassistant.components.python_script
restrictedpython==6.0
@@ -2518,7 +2518,7 @@ tololib==0.1.0b4
toonapi==0.2.1
# homeassistant.components.totalconnect
total_connect_client==2023.1
total_connect_client==2023.2
# homeassistant.components.tplink_lte
tp-connected==0.0.4
@@ -2670,13 +2670,13 @@ xs1-api-client==3.0.0
yalesmartalarmclient==0.3.9
# homeassistant.components.yalexs_ble
yalexs-ble==2.0.2
yalexs-ble==2.0.4
# homeassistant.components.august
yalexs==1.2.7
# homeassistant.components.august
yalexs_ble==2.0.2
yalexs_ble==2.0.4
# homeassistant.components.yeelight
yeelight==0.7.10

View File

@@ -143,7 +143,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.4.0
aioesphomeapi==13.4.1
# homeassistant.components.flo
aioflo==2021.11.0
@@ -227,7 +227,7 @@ aiopvpc==4.0.1
aiopyarr==22.11.0
# homeassistant.components.qnap_qsw
aioqsw==0.3.1
aioqsw==0.3.2
# homeassistant.components.recollect_waste
aiorecollect==1.0.8
@@ -690,7 +690,7 @@ hole==0.8.0
holidays==0.18.0
# homeassistant.components.frontend
home-assistant-frontend==20230222.0
home-assistant-frontend==20230224.0
# homeassistant.components.conversation
home-assistant-intents==2023.2.22
@@ -849,7 +849,7 @@ moat-ble==0.1.1
moehlenhoff-alpha2==1.3.0
# homeassistant.components.mopeka
mopeka_iot_ble==0.4.0
mopeka_iot_ble==0.4.1
# homeassistant.components.motion_blinds
motionblinds==0.6.17
@@ -891,7 +891,7 @@ nextcord==2.0.0a8
nextdns==1.3.0
# homeassistant.components.nibe_heatpump
nibe==1.6.0
nibe==2.0.0
# homeassistant.components.nfandroidtv
notifications-android-tv==0.1.5
@@ -1480,7 +1480,7 @@ python-juicenet==1.1.0
python-kasa==0.5.1
# homeassistant.components.matter
python-matter-server==3.0.0
python-matter-server==3.1.0
# homeassistant.components.xiaomi_miio
python-miio==0.5.12
@@ -1490,7 +1490,7 @@ python-nest==4.2.0
# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.3
python-otbr-api==1.0.4
# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -1585,7 +1585,7 @@ regenmaschine==2022.11.0
renault-api==0.1.12
# homeassistant.components.reolink
reolink-aio==0.5.0
reolink-aio==0.5.1
# homeassistant.components.python_script
restrictedpython==6.0
@@ -1773,7 +1773,7 @@ tololib==0.1.0b4
toonapi==0.2.1
# homeassistant.components.totalconnect
total_connect_client==2023.1
total_connect_client==2023.2
# homeassistant.components.tplink_omada
tplink-omada-client==1.1.0
@@ -1895,13 +1895,13 @@ xmltodict==0.13.0
yalesmartalarmclient==0.3.9
# homeassistant.components.yalexs_ble
yalexs-ble==2.0.2
yalexs-ble==2.0.4
# homeassistant.components.august
yalexs==1.2.7
# homeassistant.components.august
yalexs_ble==2.0.2
yalexs_ble==2.0.4
# homeassistant.components.yeelight
yeelight==0.7.10

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import ast
from collections import deque
from pathlib import Path
from homeassistant.const import Platform
@@ -118,6 +119,7 @@ ALLOWED_USED_COMPONENTS = {
"input_text",
"media_source",
"onboarding",
"panel_custom",
"persistent_notification",
"person",
"script",
@@ -138,20 +140,19 @@ IGNORE_VIOLATIONS = {
# Has same requirement, gets defaults.
("sql", "recorder"),
# Sharing a base class
("openalpr_cloud", "openalpr_local"),
("lutron_caseta", "lutron"),
("ffmpeg_noise", "ffmpeg_motion"),
# Demo
("demo", "manual"),
("demo", "openalpr_local"),
# This would be a circular dep
("http", "network"),
# This would be a circular dep
("zha", "homeassistant_hardware"),
("zha", "homeassistant_yellow"),
# This should become a helper method that integrations can submit data to
("websocket_api", "lovelace"),
("websocket_api", "shopping_list"),
"logbook",
# Migration wizard from zwave to zwave_js.
"zwave_js",
}
@@ -229,6 +230,7 @@ def find_non_referenced_integrations(
def validate_dependencies(
integrations: dict[str, Integration],
integration: Integration,
check_dependencies: bool,
) -> None:
"""Validate all dependencies."""
# Some integrations are allowed to have violations.
@@ -250,12 +252,60 @@ def validate_dependencies(
"or 'after_dependencies'",
)
if check_dependencies:
_check_circular_deps(
integrations, integration.domain, integration, set(), deque()
)
def _check_circular_deps(
integrations: dict[str, Integration],
start_domain: str,
integration: Integration,
checked: set[str],
checking: deque[str],
) -> None:
"""Check for circular dependencies pointing at starting_domain."""
if integration.domain in checked or integration.domain in checking:
return
checking.append(integration.domain)
for domain in integration.manifest.get("dependencies", []):
if domain == start_domain:
integrations[start_domain].add_error(
"dependencies",
f"Found a circular dependency with {integration.domain} ({', '.join(checking)})",
)
break
_check_circular_deps(
integrations, start_domain, integrations[domain], checked, checking
)
else:
for domain in integration.manifest.get("after_dependencies", []):
if domain == start_domain:
integrations[start_domain].add_error(
"dependencies",
f"Found a circular dependency with after dependencies of {integration.domain} ({', '.join(checking)})",
)
break
_check_circular_deps(
integrations, start_domain, integrations[domain], checked, checking
)
checked.add(integration.domain)
checking.remove(integration.domain)
def validate(integrations: dict[str, Integration], config: Config) -> None:
"""Handle dependencies for integrations."""
# check for non-existing dependencies
for integration in integrations.values():
validate_dependencies(integrations, integration)
validate_dependencies(
integrations,
integration,
check_dependencies=not config.specific_integrations,
)
if config.specific_integrations:
continue
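
_check_circular_deps walks dependencies and after_dependencies depth-first, keeping a deque of the chain currently being explored so the error can name the whole cycle. The same idea on plain dictionaries (a generic cycle finder, not the hassfest Integration model):

from collections import deque

def find_cycle(deps: dict[str, list[str]], start: str) -> list[str] | None:
    """Return the dependency chain that leads back to `start`, or None."""

    def _walk(domain: str, checked: set[str], checking: deque[str]) -> list[str] | None:
        if domain in checked or domain in checking:
            return None
        checking.append(domain)
        for dep in deps.get(domain, []):
            if dep == start:
                return list(checking)
            if (cycle := _walk(dep, checked, checking)) is not None:
                return cycle
        checked.add(domain)
        checking.remove(domain)
        return None

    return _walk(start, set(), deque())

# zha -> homeassistant_hardware -> zha is a cycle; sensor -> recorder is not.
assert find_cycle(
    {"zha": ["homeassistant_hardware"], "homeassistant_hardware": ["zha"]}, "zha"
) == ["zha", "homeassistant_hardware"]
assert find_cycle({"sensor": ["recorder"], "recorder": []}, "sensor") is None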

View File

@@ -166,3 +166,4 @@ async def test_step_reauth(
assert len(hass.config_entries.async_entries()) == 1
assert hass.config_entries.async_entries()[0].data[CONF_API_KEY] == new_api_key
await hass.async_block_till_done()

View File

@@ -19,7 +19,7 @@ from homeassistant.components.bluetooth.active_update_coordinator import (
_T,
ActiveBluetoothDataUpdateCoordinator,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
from homeassistant.setup import async_setup_component
@@ -395,3 +395,58 @@ async def test_polling_rejecting_the_first_time(
cancel()
unregister_listener()
async def test_no_polling_after_stop_event(
hass: HomeAssistant,
mock_bleak_scanner_start: MagicMock,
mock_bluetooth_adapters: None,
) -> None:
"""Test we do not poll after the stop event."""
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
needs_poll_calls = 0
def _needs_poll(
service_info: BluetoothServiceInfoBleak, seconds_since_last_poll: float | None
) -> bool:
nonlocal needs_poll_calls
needs_poll_calls += 1
return True
async def _poll_method(service_info: BluetoothServiceInfoBleak) -> dict[str, Any]:
return {"fake": "data"}
coordinator = MyCoordinator(
hass=hass,
logger=_LOGGER,
address="aa:bb:cc:dd:ee:ff",
mode=BluetoothScanningMode.ACTIVE,
needs_poll_method=_needs_poll,
poll_method=_poll_method,
)
assert coordinator.available is False # no data yet
mock_listener = MagicMock()
unregister_listener = coordinator.async_add_listener(mock_listener)
cancel = coordinator.async_start()
assert needs_poll_calls == 0
inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO)
await hass.async_block_till_done()
assert coordinator.passive_data == {"rssi": GENERIC_BLUETOOTH_SERVICE_INFO.rssi}
assert coordinator.data == {"fake": "data"}
assert needs_poll_calls == 1
hass.state = CoreState.stopping
await hass.async_block_till_done()
assert needs_poll_calls == 1
# Should not generate a poll now
inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO_2)
await hass.async_block_till_done()
assert needs_poll_calls == 1
cancel()
unregister_listener()

View File

@@ -16,7 +16,7 @@ from homeassistant.components.bluetooth import (
from homeassistant.components.bluetooth.active_update_processor import (
ActiveBluetoothProcessorCoordinator,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
from homeassistant.setup import async_setup_component
@@ -384,3 +384,65 @@ async def test_rate_limit(
assert async_handle_update.mock_calls[-1] == call({"testdata": 1})
cancel()
async def test_no_polling_after_stop_event(
hass: HomeAssistant,
mock_bleak_scanner_start: MagicMock,
mock_bluetooth_adapters: None,
) -> None:
"""Test we do not poll after the stop event."""
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
needs_poll_calls = 0
def _update_method(service_info: BluetoothServiceInfoBleak):
return {"testdata": 0}
def _poll_needed(*args, **kwargs):
nonlocal needs_poll_calls
needs_poll_calls += 1
return True
async def _poll(*args, **kwargs):
return {"testdata": 1}
coordinator = ActiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address="aa:bb:cc:dd:ee:ff",
mode=BluetoothScanningMode.ACTIVE,
update_method=_update_method,
needs_poll_method=_poll_needed,
poll_method=_poll,
)
assert coordinator.available is False # no data yet
processor = MagicMock()
coordinator.async_register_processor(processor)
async_handle_update = processor.async_handle_update
cancel = coordinator.async_start()
inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO)
await hass.async_block_till_done()
assert needs_poll_calls == 1
assert coordinator.available is True
# async_handle_update should have been called twice
# The first time, it was passed the data from parsing the advertisement
# The second time, it was passed the data from polling
assert len(async_handle_update.mock_calls) == 2
assert async_handle_update.mock_calls[0] == call({"testdata": 0})
assert async_handle_update.mock_calls[1] == call({"testdata": 1})
hass.state = CoreState.stopping
await hass.async_block_till_done()
assert needs_poll_calls == 1
# Should not generate a poll now that CoreState is stopping
inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO_2)
await hass.async_block_till_done()
assert needs_poll_calls == 1
cancel()

View File

@@ -310,6 +310,30 @@ async def test_unsupported_create_event_service(hass: HomeAssistant) -> None:
vol.error.MultipleInvalid,
"must contain at most one of start_date, start_date_time, in.",
),
(
{
"start_date_time": "2022-04-01T06:00:00+00:00",
"end_date_time": "2022-04-01T07:00:00+01:00",
},
vol.error.MultipleInvalid,
"Expected all values to have the same timezone",
),
(
{
"start_date_time": "2022-04-01T07:00:00",
"end_date_time": "2022-04-01T06:00:00",
},
vol.error.MultipleInvalid,
"Values were not in order",
),
(
{
"start_date": "2022-04-02",
"end_date": "2022-04-01",
},
vol.error.MultipleInvalid,
"Values were not in order",
),
],
ids=[
"missing_all",
@@ -324,6 +348,9 @@ async def test_unsupported_create_event_service(hass: HomeAssistant) -> None:
"multiple_in",
"unexpected_in_with_date",
"unexpected_in_with_datetime",
"inconsistent_timezone",
"incorrect_date_order",
"incorrect_datetime_order",
],
)
async def test_create_event_service_invalid_params(

View File

@@ -48,8 +48,12 @@ class FakeStore(LocalCalendarStore):
def mock_store() -> None:
"""Test cleanup, remove any media storage persisted during the test."""
stores: dict[Path, FakeStore] = {}
def new_store(hass: HomeAssistant, path: Path) -> FakeStore:
return FakeStore(hass, path)
if path not in stores:
stores[path] = FakeStore(hass, path)
return stores[path]
with patch(
"homeassistant.components.local_calendar.LocalCalendarStore", new=new_store
@@ -961,8 +965,20 @@ async def test_update_invalid_event_id(
assert resp.get("error").get("code") == "failed"
@pytest.mark.parametrize(
("start_date_time", "end_date_time"),
[
("1997-07-14T17:00:00+00:00", "1997-07-15T04:00:00+00:00"),
("1997-07-14T11:00:00-06:00", "1997-07-14T22:00:00-06:00"),
],
)
async def test_create_event_service(
hass: HomeAssistant, setup_integration: None, get_events: GetEventsFn
hass: HomeAssistant,
setup_integration: None,
get_events: GetEventsFn,
start_date_time: str,
end_date_time: str,
config_entry: MockConfigEntry,
) -> None:
"""Test creating an event using the create_event service."""
@@ -970,13 +986,15 @@ async def test_create_event_service(
"calendar",
"create_event",
{
"start_date_time": "1997-07-14T17:00:00+00:00",
"end_date_time": "1997-07-15T04:00:00+00:00",
"start_date_time": start_date_time,
"end_date_time": end_date_time,
"summary": "Bastille Day Party",
},
target={"entity_id": TEST_ENTITY},
blocking=True,
)
# Ensure data is written to disk
await hass.async_block_till_done()
events = await get_events("1997-07-14T00:00:00Z", "1997-07-16T00:00:00Z")
assert list(map(event_fields, events)) == [
@@ -995,3 +1013,17 @@ async def test_create_event_service(
"end": {"dateTime": "1997-07-14T22:00:00-06:00"},
}
]
# Reload the config entry, which reloads the content from the store and
# verifies that the persisted data can be parsed correctly.
await hass.config_entries.async_reload(config_entry.entry_id)
await hass.async_block_till_done()
events = await get_events("1997-07-13T00:00:00Z", "1997-07-14T18:00:00Z")
assert list(map(event_fields, events)) == [
{
"summary": "Bastille Day Party",
"start": {"dateTime": "1997-07-14T11:00:00-06:00"},
"end": {"dateTime": "1997-07-14T22:00:00-06:00"},
}
]

View File

@@ -31,7 +31,7 @@ async def test_contact_sensor(
"""Test contact sensor."""
state = hass.states.get("binary_sensor.mock_contact_sensor_contact")
assert state
assert state.state == "on"
assert state.state == "off"
set_node_attribute(contact_sensor_node, 1, 69, 0, False)
await trigger_subscription_callback(
@@ -40,7 +40,7 @@ async def test_contact_sensor(
state = hass.states.get("binary_sensor.mock_contact_sensor_contact")
assert state
assert state.state == "off"
assert state.state == "on"
@pytest.fixture(name="occupancy_sensor_node")

View File

@@ -26,7 +26,7 @@ async def test_get_device_id(
node = await setup_integration_with_node_fixture(
hass, "device_diagnostics", matter_client
)
device_id = get_device_id(matter_client.server_info, node.node_devices[0])
device_id = get_device_id(matter_client.server_info, node.endpoints[0])
assert device_id == "00000000000004D2-0000000000000005-MatterNodeDevice"

View File

@@ -288,7 +288,7 @@ async def test_extended_color_light(
"turn_on",
{
"entity_id": entity_id,
"hs_color": (0, 0),
"hs_color": (236.69291338582678, 100.0),
},
blocking=True,
)
@@ -297,10 +297,14 @@ async def test_extended_color_light(
matter_client.send_device_command.assert_has_calls(
[
call(
node_id=light_node.node_id,
node_id=1,
endpoint_id=1,
command=clusters.ColorControl.Commands.MoveToHueAndSaturation(
hue=0, saturation=0, transitionTime=0
hue=167,
saturation=254,
transitionTime=0,
optionsMask=0,
optionsOverride=0,
),
),
call(

View File

@@ -121,14 +121,14 @@ async def test_light_sensor(
light_sensor_node: MatterNode,
) -> None:
"""Test light sensor."""
state = hass.states.get("sensor.mock_light_sensor_light")
state = hass.states.get("sensor.mock_light_sensor_illuminance")
assert state
assert state.state == "1.3"
set_node_attribute(light_sensor_node, 1, 1024, 0, 3000)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("sensor.mock_light_sensor_light")
state = hass.states.get("sensor.mock_light_sensor_illuminance")
assert state
assert state.state == "2.0"

View File

@@ -23,6 +23,16 @@ PRO_SERVICE_INFO = BluetoothServiceInfo(
source="local",
)
PRO_UNUSABLE_SIGNAL_SERVICE_INFO = BluetoothServiceInfo(
name="",
address="aa:bb:cc:dd:ee:ff",
rssi=-60,
manufacturer_data={89: b"\x08rF\x00\x00\xe0\xf5\t\xf0\xd8"},
service_data={},
service_uuids=["0000fee5-0000-1000-8000-00805f9b34fb"],
source="local",
)
PRO_GOOD_SIGNAL_SERVICE_INFO = BluetoothServiceInfo(
name="",

View File

@@ -10,14 +10,52 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from . import PRO_GOOD_SIGNAL_SERVICE_INFO, PRO_SERVICE_INFO
from . import (
PRO_GOOD_SIGNAL_SERVICE_INFO,
PRO_SERVICE_INFO,
PRO_UNUSABLE_SIGNAL_SERVICE_INFO,
)
from tests.common import MockConfigEntry
from tests.components.bluetooth import inject_bluetooth_service_info
async def test_sensors_bad_signal(hass: HomeAssistant) -> None:
"""Test setting up creates the sensors when there is bad signal."""
async def test_sensors_unusable_signal(hass: HomeAssistant) -> None:
"""Test setting up creates the sensors when there is unusable signal."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="aa:bb:cc:dd:ee:ff",
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_all("sensor")) == 0
inject_bluetooth_service_info(hass, PRO_UNUSABLE_SIGNAL_SERVICE_INFO)
await hass.async_block_till_done()
assert len(hass.states.async_all("sensor")) == 4
temp_sensor = hass.states.get("sensor.pro_plus_eeff_temperature")
temp_sensor_attrs = temp_sensor.attributes
assert temp_sensor.state == "30"
assert temp_sensor_attrs[ATTR_FRIENDLY_NAME] == "Pro Plus EEFF Temperature"
assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS
assert temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement"
tank_sensor = hass.states.get("sensor.pro_plus_eeff_tank_level")
tank_sensor_attrs = tank_sensor.attributes
assert tank_sensor.state == STATE_UNKNOWN
assert tank_sensor_attrs[ATTR_FRIENDLY_NAME] == "Pro Plus EEFF Tank Level"
assert tank_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS
assert tank_sensor_attrs[ATTR_STATE_CLASS] == "measurement"
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
async def test_sensors_poor_signal(hass: HomeAssistant) -> None:
"""Test setting up creates the sensors when there is poor signal."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="aa:bb:cc:dd:ee:ff",
@@ -41,7 +79,7 @@ async def test_sensors_bad_signal(hass: HomeAssistant) -> None:
tank_sensor = hass.states.get("sensor.pro_plus_eeff_tank_level")
tank_sensor_attrs = tank_sensor.attributes
assert tank_sensor.state == STATE_UNKNOWN
assert tank_sensor.state == "0"
assert tank_sensor_attrs[ATTR_FRIENDLY_NAME] == "Pro Plus EEFF Tank Level"
assert tank_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS
assert tank_sensor_attrs[ATTR_STATE_CLASS] == "measurement"

View File

@@ -4,9 +4,9 @@ from contextlib import ExitStack
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
from nibe.coil import Coil
from nibe.coil import Coil, CoilData
from nibe.connection import Connection
from nibe.exceptions import CoilReadException
from nibe.exceptions import ReadException
import pytest
@@ -39,12 +39,11 @@ async def fixture_coils(mock_connection):
"""Return a dict with coil data."""
coils: dict[int, Any] = {}
async def read_coil(coil: Coil, timeout: float = 0) -> Coil:
async def read_coil(coil: Coil, timeout: float = 0) -> CoilData:
nonlocal coils
if (data := coils.get(coil.address, None)) is None:
raise CoilReadException()
coil.value = data
return coil
raise ReadException()
return CoilData(coil, data)
async def read_coils(
coils: Iterable[Coil], timeout: float = 0

View File

@@ -3,7 +3,7 @@ from typing import Any
from unittest.mock import AsyncMock, patch
from freezegun.api import FrozenDateTimeFactory
from nibe.coil import Coil
from nibe.coil import CoilData
from nibe.coil_groups import UNIT_COILGROUPS
from nibe.heatpump import Model
import pytest
@@ -91,6 +91,6 @@ async def test_reset_button(
# Verify reset was written
args = mock_connection.write_coil.call_args
assert args
coil: Coil = args.args[0]
assert coil.address == unit.alarm_reset
coil: CoilData = args.args[0]
assert coil.coil.address == unit.alarm_reset
assert coil.value == 1

View File

@@ -5,9 +5,9 @@ from nibe.coil import Coil
from nibe.exceptions import (
AddressInUseException,
CoilNotFoundException,
CoilReadException,
CoilReadSendException,
CoilWriteException,
ReadException,
ReadSendException,
WriteException,
)
import pytest
@@ -169,7 +169,7 @@ async def test_read_timeout(
"""Test we handle cannot connect error."""
result = await _get_connection_form(hass, connection_type)
mock_connection.verify_connectivity.side_effect = CoilReadException()
mock_connection.verify_connectivity.side_effect = ReadException()
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)
@@ -190,7 +190,7 @@ async def test_write_timeout(
"""Test we handle cannot connect error."""
result = await _get_connection_form(hass, connection_type)
mock_connection.verify_connectivity.side_effect = CoilWriteException()
mock_connection.verify_connectivity.side_effect = WriteException()
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)
@@ -232,7 +232,7 @@ async def test_nibegw_invalid_host(
"""Test we handle cannot connect error."""
result = await _get_connection_form(hass, connection_type)
mock_connection.verify_connectivity.side_effect = CoilReadSendException()
mock_connection.verify_connectivity.side_effect = ReadSendException()
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)

View File

@@ -1,4 +1,5 @@
"""Define test fixtures for OpenUV."""
from collections.abc import Generator
import json
from unittest.mock import AsyncMock, Mock, patch
@@ -20,6 +21,15 @@ TEST_LATITUDE = 51.528308
TEST_LONGITUDE = -0.3817765
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock, None, None]:
"""Override async_setup_entry."""
with patch(
"homeassistant.components.openuv.async_setup_entry", return_value=True
) as mock_setup_entry:
yield mock_setup_entry
@pytest.fixture(name="client")
def client_fixture(data_protection_window, data_uv_index):
"""Define a mock Client object."""

View File

@@ -2,6 +2,7 @@
from unittest.mock import AsyncMock, patch
from pyopenuv.errors import InvalidApiKeyError
import pytest
import voluptuous as vol
from homeassistant import data_entry_flow
@@ -17,6 +18,8 @@ from homeassistant.core import HomeAssistant
from .conftest import TEST_API_KEY, TEST_ELEVATION, TEST_LATITUDE, TEST_LONGITUDE
pytestmark = pytest.mark.usefixtures("mock_setup_entry")
async def test_create_entry(hass: HomeAssistant, client, config, mock_pyopenuv) -> None:
"""Test creating an entry."""

View File

@@ -1,8 +1,11 @@
"""Test the Open Thread Border Router config flow."""
import asyncio
from http import HTTPStatus
from unittest.mock import patch
import aiohttp
import pytest
import python_otbr_api
from homeassistant.components import hassio, otbr
from homeassistant.core import HomeAssistant
@@ -95,7 +98,7 @@ async def test_user_flow_router_not_setup(
assert aioclient_mock.mock_calls[-1][0] == "POST"
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
assert aioclient_mock.mock_calls[-1][2] == "enabled"
assert aioclient_mock.mock_calls[-1][2] == "enable"
expected_data = {
"url": "http://custom_url:1234",
@@ -137,6 +140,34 @@ async def test_user_flow_404(
assert result["errors"] == {"base": "cannot_connect"}
@pytest.mark.parametrize(
"error",
[
asyncio.TimeoutError,
python_otbr_api.OTBRError,
aiohttp.ClientError,
],
)
async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None:
"""Test the user flow."""
result = await hass.config_entries.flow.async_init(
otbr.DOMAIN, context={"source": "user"}
)
assert result["type"] == FlowResultType.FORM
assert result["errors"] == {}
with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": "http://custom_url:1234",
},
)
assert result["type"] == FlowResultType.FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_hassio_discovery_flow(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
@@ -199,7 +230,7 @@ async def test_hassio_discovery_flow_router_not_setup(
assert aioclient_mock.mock_calls[-1][0] == "POST"
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
assert aioclient_mock.mock_calls[-1][2] == "enabled"
assert aioclient_mock.mock_calls[-1][2] == "enable"
expected_data = {
"url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}",
@@ -248,7 +279,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred(
assert aioclient_mock.mock_calls[-1][0] == "POST"
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
assert aioclient_mock.mock_calls[-1][2] == "enabled"
assert aioclient_mock.mock_calls[-1][2] == "enable"
expected_data = {
"url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}",

View File

@@ -1,9 +1,11 @@
"""Test the Open Thread Border Router integration."""
import asyncio
from http import HTTPStatus
from unittest.mock import patch
import aiohttp
import pytest
import python_otbr_api
from homeassistant.components import otbr
from homeassistant.core import HomeAssistant
@@ -35,9 +37,15 @@ async def test_import_dataset(hass: HomeAssistant) -> None:
mock_add.assert_called_once_with(config_entry.title, DATASET.hex())
async def test_config_entry_not_ready(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
@pytest.mark.parametrize(
"error",
[
asyncio.TimeoutError,
python_otbr_api.OTBRError,
aiohttp.ClientError,
],
)
async def test_config_entry_not_ready(hass: HomeAssistant, error) -> None:
"""Test raising ConfigEntryNotReady ."""
config_entry = MockConfigEntry(
@@ -47,8 +55,8 @@ async def test_config_entry_not_ready(
title="My OTBR",
)
config_entry.add_to_hass(hass)
aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED)
assert not await hass.config_entries.async_setup(config_entry.entry_id)
with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error):
assert not await hass.config_entries.async_setup(config_entry.entry_id)
async def test_remove_entry(

View File

@@ -75,7 +75,7 @@ async def mock_aiopurpleair_fixture(api):
with patch(
"homeassistant.components.purpleair.config_flow.API", return_value=api
), patch("homeassistant.components.purpleair.coordinator.API", return_value=api):
yield
yield api
@pytest.fixture(name="setup_config_entry")

View File

@@ -123,7 +123,7 @@ async def test_duplicate_error(
)
async def test_reauth(
hass: HomeAssistant,
api,
mock_aiopurpleair,
check_api_key_errors,
check_api_key_mock,
config_entry,
@@ -143,7 +143,7 @@ async def test_reauth(
assert result["step_id"] == "reauth_confirm"
# Test errors that can arise when checking the API key:
with patch.object(api, "async_check_api_key", check_api_key_mock):
with patch.object(mock_aiopurpleair, "async_check_api_key", check_api_key_mock):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"api_key": "new_api_key"}
)
@@ -157,6 +157,9 @@ async def test_reauth(
assert result["type"] == data_entry_flow.FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert len(hass.config_entries.async_entries()) == 1
# Unload to make sure the update does not run after the
# mock is removed.
await hass.config_entries.async_unload(config_entry.entry_id)
@pytest.mark.parametrize(
@@ -169,7 +172,7 @@ async def test_reauth(
)
async def test_options_add_sensor(
hass: HomeAssistant,
api,
mock_aiopurpleair,
config_entry,
get_nearby_sensors_errors,
get_nearby_sensors_mock,
@@ -187,7 +190,9 @@ async def test_options_add_sensor(
assert result["step_id"] == "add_sensor"
# Test errors that can arise when searching for nearby sensors:
with patch.object(api.sensors, "async_get_nearby_sensors", get_nearby_sensors_mock):
with patch.object(
mock_aiopurpleair.sensors, "async_get_nearby_sensors", get_nearby_sensors_mock
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
@@ -225,6 +230,9 @@ async def test_options_add_sensor(
TEST_SENSOR_INDEX1,
TEST_SENSOR_INDEX2,
]
# Unload to make sure the update does not run after the
# mock is removed.
await hass.config_entries.async_unload(config_entry.entry_id)
async def test_options_add_sensor_duplicate(
@@ -260,6 +268,9 @@ async def test_options_add_sensor_duplicate(
)
assert result["type"] == data_entry_flow.FlowResultType.ABORT
assert result["reason"] == "already_configured"
# Unload to make sure the update does not run after the
# mock is removed.
await hass.config_entries.async_unload(config_entry.entry_id)
async def test_options_remove_sensor(
@@ -288,3 +299,6 @@ async def test_options_remove_sensor(
}
assert config_entry.options["sensor_indices"] == []
# Unload to make sure the update does not run after the
# mock is removed.
await hass.config_entries.async_unload(config_entry.entry_id)

View File

@@ -71,7 +71,10 @@ async def test_form(hass: HomeAssistant, nvr: NVR) -> None:
), patch(
"homeassistant.components.unifiprotect.async_setup_entry",
return_value=True,
) as mock_setup_entry:
) as mock_setup_entry, patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -93,6 +96,7 @@ async def test_form(hass: HomeAssistant, nvr: NVR) -> None:
"verify_ssl": False,
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_setup.mock_calls) == 1
async def test_form_version_too_old(hass: HomeAssistant, old_nvr: NVR) -> None:
@@ -214,7 +218,10 @@ async def test_form_reauth_auth(hass: HomeAssistant, nvr: NVR) -> None:
with patch(
"homeassistant.components.unifiprotect.config_flow.ProtectApiClient.get_nvr",
return_value=nvr,
):
), patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
{
@@ -225,6 +232,7 @@ async def test_form_reauth_auth(hass: HomeAssistant, nvr: NVR) -> None:
assert result3["type"] == FlowResultType.ABORT
assert result3["reason"] == "reauth_successful"
assert len(mock_setup.mock_calls) == 1
async def test_form_options(hass: HomeAssistant, ufp_client: ProtectApiClient) -> None:
@@ -332,7 +340,10 @@ async def test_discovered_by_unifi_discovery_direct_connect(
), patch(
"homeassistant.components.unifiprotect.async_setup_entry",
return_value=True,
) as mock_setup_entry:
) as mock_setup_entry, patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -353,6 +364,7 @@ async def test_discovered_by_unifi_discovery_direct_connect(
"verify_ssl": True,
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_setup.mock_calls) == 1
async def test_discovered_by_unifi_discovery_direct_connect_updated(
@@ -515,7 +527,10 @@ async def test_discovered_by_unifi_discovery(hass: HomeAssistant, nvr: NVR) -> N
), patch(
"homeassistant.components.unifiprotect.async_setup_entry",
return_value=True,
) as mock_setup_entry:
) as mock_setup_entry, patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -536,6 +551,7 @@ async def test_discovered_by_unifi_discovery(hass: HomeAssistant, nvr: NVR) -> N
"verify_ssl": False,
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_setup.mock_calls) == 1
async def test_discovered_by_unifi_discovery_partial(
@@ -567,7 +583,10 @@ async def test_discovered_by_unifi_discovery_partial(
), patch(
"homeassistant.components.unifiprotect.async_setup_entry",
return_value=True,
) as mock_setup_entry:
) as mock_setup_entry, patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -588,6 +607,7 @@ async def test_discovered_by_unifi_discovery_partial(
"verify_ssl": False,
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_setup.mock_calls) == 1
async def test_discovered_by_unifi_discovery_direct_connect_on_different_interface(
@@ -736,7 +756,10 @@ async def test_discovered_by_unifi_discovery_direct_connect_on_different_interfa
), patch(
"homeassistant.components.unifiprotect.async_setup_entry",
return_value=True,
) as mock_setup_entry:
) as mock_setup_entry, patch(
"homeassistant.components.unifiprotect.async_setup",
return_value=True,
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -757,6 +780,7 @@ async def test_discovered_by_unifi_discovery_direct_connect_on_different_interfa
"verify_ssl": True,
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_setup.mock_calls) == 1
async def test_discovered_by_unifi_discovery_direct_connect_on_different_interface_resolver_no_result(

View File

@@ -15,6 +15,7 @@ import zigpy.types
from homeassistant import config_entries
from homeassistant.components import ssdp, usb, zeroconf
from homeassistant.components.hassio import AddonState
from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL
from homeassistant.components.zha import config_flow, radio_manager
from homeassistant.components.zha.core.const import (
@@ -1840,3 +1841,46 @@ async def test_options_flow_migration_reset_old_adapter(
user_input={},
)
assert result4["step_id"] == "choose_serial_port"
async def test_config_flow_port_yellow_port_name(hass: HomeAssistant) -> None:
"""Test config flow serial port name for Yellow Zigbee radio."""
port = com_port(device="/dev/ttyAMA1")
port.serial_number = None
port.manufacturer = None
port.description = None
with patch(
"homeassistant.components.zha.config_flow.yellow_hardware.async_info"
), patch("serial.tools.list_ports.comports", MagicMock(return_value=[port])):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_USER},
)
assert (
result["data_schema"].schema["path"].container[0]
== "/dev/ttyAMA1 - Yellow Zigbee module - Nabu Casa"
)
async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> None:
"""Test config flow serial port name for multiprotocol add-on."""
with patch(
"homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info"
) as async_get_addon_info, patch(
"serial.tools.list_ports.comports", MagicMock(return_value=[])
):
async_get_addon_info.return_value.state = AddonState.RUNNING
async_get_addon_info.return_value.hostname = "core-silabs-multiprotocol"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_USER},
)
assert (
result["data_schema"].schema["path"].container[0]
== "socket://core-silabs-multiprotocol:9999 - Multiprotocol add-on - Nabu Casa"
)

View File

@@ -287,3 +287,39 @@ async def test_gateway_initialize_failure_transient(
# Initialization immediately stops and is retried after TransientConnectionError
assert mock_new.call_count == 2
@patch(
"homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices",
MagicMock(),
)
@patch(
"homeassistant.components.zha.core.gateway.ZHAGateway.async_load_groups",
MagicMock(),
)
@pytest.mark.parametrize(
("device_path", "thread_state", "config_override"),
[
("/dev/ttyUSB0", True, {}),
("socket://192.168.1.123:9999", False, {}),
("socket://192.168.1.123:9999", True, {"use_thread": True}),
],
)
async def test_gateway_initialize_bellows_thread(
device_path, thread_state, config_override, hass, coordinator
):
"""Test ZHA disabling the UART thread when connecting to a TCP coordinator."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
zha_gateway.config_entry.data = dict(zha_gateway.config_entry.data)
zha_gateway.config_entry.data["device"]["path"] = device_path
zha_gateway._config.setdefault("zigpy_config", {}).update(config_override)
with patch(
"bellows.zigbee.application.ControllerApplication.new",
new=AsyncMock(),
) as mock_new:
await zha_gateway.async_initialize()
assert mock_new.mock_calls[0].args[0]["use_thread"] is thread_state

View File

@@ -2,6 +2,7 @@
from copy import deepcopy
from http import HTTPStatus
import json
from typing import Any
from unittest.mock import patch
import pytest
@@ -2983,12 +2984,18 @@ async def test_get_config_parameters(
assert msg["error"]["code"] == ERR_NOT_LOADED
@pytest.mark.parametrize(
("firmware_data", "expected_data"),
[({"target": "1"}, {"firmware_target": 1}), ({}, {})],
)
async def test_firmware_upload_view(
hass: HomeAssistant,
multisensor_6,
integration,
hass_client: ClientSessionGenerator,
firmware_file,
firmware_data: dict[str, Any],
expected_data: dict[str, Any],
) -> None:
"""Test the HTTP firmware upload view."""
client = await hass_client()
@@ -3001,15 +3008,19 @@ async def test_firmware_upload_view(
"homeassistant.components.zwave_js.api.USER_AGENT",
{"HomeAssistant": "0.0.0"},
):
data = {"file": firmware_file}
data.update(firmware_data)
resp = await client.post(
f"/api/zwave_js/firmware/upload/{device.id}",
data={"file": firmware_file},
f"/api/zwave_js/firmware/upload/{device.id}", data=data
)
update_data = NodeFirmwareUpdateData("file", bytes(10))
for attr, value in expected_data.items():
setattr(update_data, attr, value)
mock_controller_cmd.assert_not_called()
assert mock_node_cmd.call_args[0][1:3] == (
multisensor_6,
[NodeFirmwareUpdateData("file", bytes(10))],
)
assert mock_node_cmd.call_args[0][1:3] == (multisensor_6, [update_data])
assert mock_node_cmd.call_args[1] == {
"additional_user_agent_components": {"HomeAssistant": "0.0.0"},
}
@@ -3017,7 +3028,11 @@ async def test_firmware_upload_view(
async def test_firmware_upload_view_controller(
hass, client, integration, hass_client: ClientSessionGenerator, firmware_file
hass: HomeAssistant,
client,
integration,
hass_client: ClientSessionGenerator,
firmware_file,
) -> None:
"""Test the HTTP firmware upload view for a controller."""
hass_client = await hass_client()

View File

@@ -96,3 +96,20 @@ async def test_async_create_flow_checks_existing_flows_before_startup(
data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
]
async def test_async_create_flow_does_nothing_after_stop(
hass: HomeAssistant, mock_flow_init
) -> None:
"""Test we no longer create flows when hass is stopping."""
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
hass.state = CoreState.stopping
mock_flow_init.reset_mock()
discovery_flow.async_create_flow(
hass,
"hue",
{"source": config_entries.SOURCE_HOMEKIT},
{"properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert len(mock_flow_init.mock_calls) == 0
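
Note: a minimal sketch of the shutdown guard this test checks for: once Home Assistant has entered CoreState.stopping, a discovery flow request is simply dropped instead of being started. The wrapper below is illustrative and only assumes the hass and config-entries APIs already used in the test; it is not the real discovery_flow helper.

from homeassistant.core import CoreState, HomeAssistant


def create_discovery_flow_sketch(
    hass: HomeAssistant, domain: str, context: dict, data: dict
) -> None:
    """Start a discovery-initiated config flow unless shutdown has begun."""
    if hass.state is CoreState.stopping:
        # Mirrors the assertion above: no flow_init calls once hass is stopping.
        return
    hass.async_create_task(
        hass.config_entries.flow.async_init(domain, context=context, data=data)
    )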

View File

@@ -29,6 +29,7 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component
from homeassistant.util import dt
import homeassistant.util.dt as dt_util
from .common import (
MockConfigEntry,
@@ -999,6 +1000,27 @@ async def test_setup_retrying_during_unload_before_started(hass: HomeAssistant)
)
async def test_setup_does_not_retry_during_shutdown(hass: HomeAssistant) -> None:
"""Test we do not retry when HASS is shutting down."""
entry = MockConfigEntry(domain="test")
mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
mock_entity_platform(hass, "config_flow.test", None)
await entry.async_setup(hass)
assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
assert len(mock_setup_entry.mock_calls) == 1
hass.state = CoreState.stopping
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
assert len(mock_setup_entry.mock_calls) == 1
async def test_create_entry_options(hass: HomeAssistant) -> None:
"""Test a config entry being created with options."""

View File

@@ -9,6 +9,7 @@ import gc
import logging
import os
from tempfile import TemporaryDirectory
import time
from typing import Any
from unittest.mock import MagicMock, Mock, PropertyMock, patch
@@ -2003,3 +2004,49 @@ async def test_background_task(hass: HomeAssistant) -> None:
await asyncio.sleep(0)
await hass.async_stop()
assert result.result() == ha.CoreState.stopping
async def test_shutdown_does_not_block_on_normal_tasks(
hass: HomeAssistant,
) -> None:
"""Ensure shutdown does not block on normal tasks."""
result = asyncio.Future()
unshielded_task = asyncio.sleep(10)
async def test_task():
try:
await unshielded_task
except asyncio.CancelledError:
result.set_result(hass.state)
start = time.monotonic()
task = hass.async_create_task(test_task())
await asyncio.sleep(0)
await hass.async_stop()
await asyncio.sleep(0)
assert result.done()
assert task.done()
assert time.monotonic() - start < 0.5
async def test_shutdown_does_not_block_on_shielded_tasks(
hass: HomeAssistant,
) -> None:
"""Ensure shutdown does not block on shielded tasks."""
result = asyncio.Future()
shielded_task = asyncio.shield(asyncio.sleep(10))
async def test_task():
try:
await shielded_task
except asyncio.CancelledError:
result.set_result(hass.state)
start = time.monotonic()
task = hass.async_create_task(test_task())
await asyncio.sleep(0)
await hass.async_stop()
await asyncio.sleep(0)
assert result.done()
assert task.done()
assert time.monotonic() - start < 0.5