Mirror of https://github.com/home-assistant/core.git (synced 2025-09-22 03:19:33 +00:00)

Compare commits: 2023.3.0...frontend-d (99 commits)
Commits in this comparison (SHA1):

651312fb42 7b3cab1bfe c096ef3fce 9fed4472f1 7a5a882687 73c7ee4326 79f96fe900 7cc8712a0c 0e8d28dab0
fd87748b99 00954dfc1f e95944bf9f ac70612ec5 7419a92a1b ff4de8cd06 bdb9994b7e 2dcc2f88cc db1dd16ab0
2c2489284b 198ebaff6e 5cc9e7fedd 76819fbb23 aeb6c4f078 b25f6e3ffc b542f6b3ac a8d587bc53 fe8f3602ff
735000475a ae3e8746f7 10bf910f88 b7846de311 66b33e1090 4fd7ca503f 33466cdddd 0d25eef19c b5223e1196
1d1c553d9b f8934175cb 4898d22960 480a495239 d219e7c8b1 c8fc2dc440 9be3f86a4c bea81d3f63 0f01866508
588b51bdfa 0fb41bdffe c9dfa15ed6 e00ff54869 7c23de469e 490a0908d4 327edabb64 b4a3a663cf 1519a78567
57360a7528 7b61d3763b 0f204d6502 0a3a8c4b3c 091305fc57 3499d60401 f18c0bf626 f52a5f6965 1edef73c9a
5a365788b5 a60fd18386 0223058d25 7b2e743a6b 69a3738bdb e69091c6db ee7dfdae30 fdc06c2fc2 ba929dfc79
753c790a25 ee8f746808 84823d2fcf 0ae2fdc08b d90ee85118 2f826a6f86 af49b98475 9575cd9161 f0b029c363
a71487a42b d5f1713498 301144993c e0601530a0 e1e0400b16 5739782877 6112793b19 f8314fe007 dac3c7179f
6511b3f355 6474297d1f 27ebee1501 23b52025f9 87dc692a20 473db48943 aa3657e071 2a819f23c1 c6ff79aa0e
@@ -639,6 +639,10 @@ omit =
    homeassistant/components/linode/*
    homeassistant/components/linux_battery/sensor.py
    homeassistant/components/lirc/*
    homeassistant/components/livisi/__init__.py
    homeassistant/components/livisi/climate.py
    homeassistant/components/livisi/coordinator.py
    homeassistant/components/livisi/switch.py
    homeassistant/components/llamalab_automate/notify.py
    homeassistant/components/logi_circle/__init__.py
    homeassistant/components/logi_circle/camera.py
@@ -803,7 +807,8 @@ omit =
    homeassistant/components/nuki/sensor.py
    homeassistant/components/nx584/alarm_control_panel.py
    homeassistant/components/oasa_telematics/sensor.py
    homeassistant/components/obihai/*
    homeassistant/components/obihai/connectivity.py
    homeassistant/components/obihai/sensor.py
    homeassistant/components/octoprint/__init__.py
    homeassistant/components/oem/climate.py
    homeassistant/components/ohmconnect/sensor.py
.github/workflows/ci.yaml (6 changes, vendored)
@@ -31,7 +31,7 @@ env:
  CACHE_VERSION: 5
  PIP_CACHE_VERSION: 4
  MYPY_CACHE_VERSION: 4
  HA_SHORT_VERSION: 2023.3
  HA_SHORT_VERSION: 2023.4
  DEFAULT_PYTHON: "3.10"
  ALL_PYTHON_VERSIONS: "['3.10', '3.11']"
  # 10.3 is the oldest supported version
@@ -1073,10 +1073,10 @@ jobs:
            ffmpeg \
            postgresql-server-dev-14
      - name: Check out code from GitHub
        uses: actions/checkout@v3.1.0
        uses: actions/checkout@v3.3.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v4.3.0
        uses: actions/setup-python@v4.5.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
@@ -186,6 +186,7 @@ homeassistant.components.ld2410_ble.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
homeassistant.components.local_ip.*
homeassistant.components.lock.*
@@ -825,7 +825,8 @@ build.json @home-assistant/supervisor
/tests/components/nws/ @MatthewFlamm @kamiyo
/homeassistant/components/nzbget/ @chriscla
/tests/components/nzbget/ @chriscla
/homeassistant/components/obihai/ @dshokouhi
/homeassistant/components/obihai/ @dshokouhi @ejpenney
/tests/components/obihai/ @dshokouhi @ejpenney
/homeassistant/components/octoprint/ @rfleming71
/tests/components/octoprint/ @rfleming71
/homeassistant/components/ohmconnect/ @robbiet480
@@ -1138,8 +1139,8 @@ build.json @home-assistant/supervisor
/tests/components/starline/ @anonym-tsk
/homeassistant/components/starlink/ @boswelja
/tests/components/starlink/ @boswelja
/homeassistant/components/statistics/ @fabaff @ThomDietrich
/tests/components/statistics/ @fabaff @ThomDietrich
/homeassistant/components/statistics/ @ThomDietrich
/tests/components/statistics/ @ThomDietrich
/homeassistant/components/steam_online/ @tkdrob
/tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco
@@ -3,7 +3,6 @@ from __future__ import annotations

import asyncio
from collections.abc import Mapping
import logging
from typing import Any

from AIOAladdinConnect import AladdinConnectClient
@@ -20,8 +19,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CLIENT_ID, DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
@@ -134,12 +131,6 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_import(
        self, import_data: dict[str, Any] | None = None
    ) -> FlowResult:
        """Import Aladin Connect config from configuration.yaml."""
        return await self.async_step_user(import_data)


class InvalidAuth(HomeAssistantError):
    """Error to indicate there is invalid auth."""
@@ -2,63 +2,24 @@
from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any, Final
from typing import Any

from AIOAladdinConnect import AladdinConnectClient
import voluptuous as vol

from homeassistant.components.cover import (
    PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA,
    CoverDeviceClass,
    CoverEntity,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
    CONF_PASSWORD,
    CONF_USERNAME,
    STATE_CLOSED,
    STATE_CLOSING,
    STATE_OPENING,
)
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import DOMAIN, STATES_MAP, SUPPORTED_FEATURES
from .model import DoorDevice

_LOGGER: Final = logging.getLogger(__name__)

PLATFORM_SCHEMA: Final = BASE_PLATFORM_SCHEMA.extend(
    {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
SCAN_INTERVAL = timedelta(seconds=300)


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up Aladdin Connect devices yaml depreciated."""
    _LOGGER.warning(
        "Configuring Aladdin Connect through yaml is deprecated. Please remove it from"
        " your configuration as it has already been imported to a config entry"
    )
    await hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=config,
        )
    )


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
@@ -5,6 +5,7 @@ import asyncio
from http import HTTPStatus
import json
import logging
from typing import cast

import aiohttp
import async_timeout
@@ -15,6 +16,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.significant_change import create_checker
import homeassistant.util.dt as dt_util
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import API_CHANGE, DATE_FORMAT, DOMAIN, Cause
from .entities import ENTITY_ADAPTERS, AlexaEntity, generate_alexa_id
@@ -162,9 +164,10 @@ async def async_send_changereport_message(
    if response.status == HTTPStatus.ACCEPTED:
        return

    response_json = json.loads(response_text)
    response_json = json_loads_object(response_text)
    response_payload = cast(JsonObjectType, response_json["payload"])

    if response_json["payload"]["code"] == "INVALID_ACCESS_TOKEN_EXCEPTION":
    if response_payload["code"] == "INVALID_ACCESS_TOKEN_EXCEPTION":
        if invalidate_access_token:
            # Invalidate the access token and try again
            config.async_invalidate_access_token()
@@ -180,8 +183,8 @@ async def async_send_changereport_message(
    _LOGGER.error(
        "Error when sending ChangeReport for %s to Alexa: %s: %s",
        alexa_entity.entity_id,
        response_json["payload"]["code"],
        response_json["payload"]["description"],
        response_payload["code"],
        response_payload["description"],
    )


@@ -299,11 +302,12 @@ async def async_send_doorbell_event_message(hass, config, alexa_entity):
    if response.status == HTTPStatus.ACCEPTED:
        return

    response_json = json.loads(response_text)
    response_json = json_loads_object(response_text)
    response_payload = cast(JsonObjectType, response_json["payload"])

    _LOGGER.error(
        "Error when sending DoorbellPress event for %s to Alexa: %s: %s",
        alexa_entity.entity_id,
        response_json["payload"]["code"],
        response_json["payload"]["description"],
        response_payload["code"],
        response_payload["description"],
    )
@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/apprise",
  "iot_class": "cloud_push",
  "loggers": ["apprise"],
  "requirements": ["apprise==1.2.1"]
  "requirements": ["apprise==1.3.0"]
}
@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.2"]
  "requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.4"]
}
@@ -106,6 +106,8 @@ class ActiveBluetoothDataUpdateCoordinator(

    def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
        """Return true if time to try and poll."""
        if self.hass.is_stopping:
            return False
        poll_age: float | None = None
        if self._last_poll:
            poll_age = monotonic_time_coarse() - self._last_poll
@@ -99,6 +99,8 @@ class ActiveBluetoothProcessorCoordinator(

    def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
        """Return true if time to try and poll."""
        if self.hass.is_stopping:
            return False
        poll_age: float | None = None
        if self._last_poll:
            poll_age = monotonic_time_coarse() - self._last_poll
@@ -20,5 +20,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/bthome",
  "iot_class": "local_push",
  "requirements": ["bthome-ble==2.5.2"]
  "requirements": ["bthome-ble==2.7.0"]
}
@@ -119,6 +119,16 @@ SENSOR_DESCRIPTIONS = {
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        state_class=SensorStateClass.TOTAL_INCREASING,
    ),
    # Gas (m3)
    (
        BTHomeSensorDeviceClass.GAS,
        Units.VOLUME_CUBIC_METERS,
    ): SensorEntityDescription(
        key=f"{BTHomeSensorDeviceClass.GAS}_{Units.VOLUME_CUBIC_METERS}",
        device_class=SensorDeviceClass.GAS,
        native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
        state_class=SensorStateClass.TOTAL_INCREASING,
    ),
    # Humidity in (percent)
    (BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
        key=f"{BTHomeSensorDeviceClass.HUMIDITY}_{Units.PERCENTAGE}",
@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/caldav",
  "iot_class": "cloud_polling",
  "loggers": ["caldav", "vobject"],
  "requirements": ["caldav==1.1.1"]
  "requirements": ["caldav==1.2.0"]
}
@@ -66,6 +66,55 @@ SCAN_INTERVAL = datetime.timedelta(seconds=60)
# Don't support rrules more often than daily
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}


def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Verify that all datetime values have a consistent timezone."""

    def validate(obj: dict[str, Any]) -> dict[str, Any]:
        """Test that all keys that are datetime values have the same timezone."""
        tzinfos = []
        for key in keys:
            if not (value := obj.get(key)) or not isinstance(value, datetime.datetime):
                return obj
            tzinfos.append(value.tzinfo)
        uniq_values = groupby(tzinfos)
        if len(list(uniq_values)) > 1:
            raise vol.Invalid("Expected all values to have the same timezone")
        return obj

    return validate


def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Convert all datetime values to the local timezone."""

    def validate(obj: dict[str, Any]) -> dict[str, Any]:
        """Test that all keys that are datetime values have the same timezone."""
        for k in keys:
            if (value := obj.get(k)) and isinstance(value, datetime.datetime):
                obj[k] = dt.as_local(value)
        return obj

    return validate


def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Verify that the specified values are sequential."""

    def validate(obj: dict[str, Any]) -> dict[str, Any]:
        """Test that all keys in the dict are in order."""
        values = []
        for k in keys:
            if not (value := obj.get(k)):
                return obj
            values.append(value)
        if all(values) and values != sorted(values):
            raise vol.Invalid(f"Values were not in order: {values}")
        return obj

    return validate


CREATE_EVENT_SERVICE = "create_event"
CREATE_EVENT_SCHEMA = vol.All(
    cv.has_at_least_one_key(EVENT_START_DATE, EVENT_START_DATETIME, EVENT_IN),
@@ -98,6 +147,10 @@ CREATE_EVENT_SCHEMA = vol.All(
            ),
        },
    ),
    _has_consistent_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
    _as_local_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
    _is_sorted(EVENT_START_DATE, EVENT_END_DATE),
    _is_sorted(EVENT_START_DATETIME, EVENT_END_DATETIME),
)


@@ -441,36 +494,6 @@ def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    return validate


def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Verify that all datetime values have a consistent timezone."""

    def validate(obj: dict[str, Any]) -> dict[str, Any]:
        """Test that all keys that are datetime values have the same timezone."""
        values = [obj[k] for k in keys]
        if all(isinstance(value, datetime.datetime) for value in values):
            uniq_values = groupby(value.tzinfo for value in values)
            if len(list(uniq_values)) > 1:
                raise vol.Invalid(
                    f"Expected all values to have the same timezone: {values}"
                )
        return obj

    return validate


def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Verify that the specified values are sequential."""

    def validate(obj: dict[str, Any]) -> dict[str, Any]:
        """Test that all keys in the dict are in order."""
        values = [obj[k] for k in keys]
        if values != sorted(values):
            raise vol.Invalid(f"Values were not in order: {values}")
        return obj

    return validate


@websocket_api.websocket_command(
    {
        vol.Required("type"): "calendar/event/create",
@@ -486,6 +509,7 @@ def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
            },
            _has_same_type(EVENT_START, EVENT_END),
            _has_consistent_timezone(EVENT_START, EVENT_END),
            _as_local_timezone(EVENT_START, EVENT_END),
            _is_sorted(EVENT_START, EVENT_END),
        )
    ),
@@ -582,6 +606,7 @@ async def handle_calendar_event_delete(
            },
            _has_same_type(EVENT_START, EVENT_END),
            _has_consistent_timezone(EVENT_START, EVENT_END),
            _as_local_timezone(EVENT_START, EVENT_END),
            _is_sorted(EVENT_START, EVENT_END),
        )
    ),
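The validators above plug into `vol.All`, so one schema both checks field types and enforces cross-field rules such as ordering. The following standalone sketch only illustrates that composition pattern; `EVENT_SCHEMA`, the key names, and the trimmed `_is_sorted` below are hypothetical stand-ins, not the Home Assistant schema itself.

```python
import datetime

import voluptuous as vol


def _is_sorted(*keys):
    """Reject dicts whose values for the given keys are out of order."""

    def validate(obj):
        values = [obj[k] for k in keys if k in obj]
        if values != sorted(values):
            raise vol.Invalid(f"Values were not in order: {values}")
        return obj

    return validate


# vol.All runs each validator in turn, passing the (possibly mutated) dict along.
EVENT_SCHEMA = vol.All(
    vol.Schema({"dtstart": datetime.datetime, "dtend": datetime.datetime}),
    _is_sorted("dtstart", "dtend"),
)

# Accepted: start precedes end.
EVENT_SCHEMA(
    {
        "dtstart": datetime.datetime(2023, 3, 1, 9, 0),
        "dtend": datetime.datetime(2023, 3, 1, 10, 0),
    }
)
# Raises vol.Invalid: end precedes start.
# EVENT_SCHEMA({"dtstart": datetime.datetime(2023, 3, 1, 10), "dtend": datetime.datetime(2023, 3, 1, 9)})
```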
@@ -3,6 +3,7 @@
DOMAIN = "conversation"

DEFAULT_EXPOSED_DOMAINS = {
    "binary_sensor",
    "climate",
    "cover",
    "fan",
@@ -16,3 +17,5 @@ DEFAULT_EXPOSED_DOMAINS = {
    "vacuum",
    "water_heater",
}

DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
@@ -28,7 +28,7 @@ from homeassistant.helpers import (
from homeassistant.util.json import JsonObjectType, json_loads_object

from .agent import AbstractConversationAgent, ConversationInput, ConversationResult
from .const import DEFAULT_EXPOSED_DOMAINS, DOMAIN
from .const import DEFAULT_EXPOSED_ATTRIBUTES, DEFAULT_EXPOSED_DOMAINS, DOMAIN

_LOGGER = logging.getLogger(__name__)
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
@@ -227,7 +227,21 @@ class DefaultAgent(AbstractConversationAgent):
        intent_response: intent.IntentResponse,
        recognize_result: RecognizeResult,
    ) -> str:
        all_states = intent_response.matched_states + intent_response.unmatched_states
        # Make copies of the states here so we can add translated names for responses.
        matched: list[core.State] = []

        for state in intent_response.matched_states:
            state_copy = core.State.from_dict(state.as_dict())
            if state_copy is not None:
                matched.append(state_copy)

        unmatched: list[core.State] = []
        for state in intent_response.unmatched_states:
            state_copy = core.State.from_dict(state.as_dict())
            if state_copy is not None:
                unmatched.append(state_copy)

        all_states = matched + unmatched
        domains = {state.domain for state in all_states}
        translations = await translation.async_get_translations(
            self.hass, language, "state", domains
@@ -262,13 +276,11 @@ class DefaultAgent(AbstractConversationAgent):
            "query": {
                # Entity states that matched the query (e.g, "on")
                "matched": [
                    template.TemplateState(self.hass, state)
                    for state in intent_response.matched_states
                    template.TemplateState(self.hass, state) for state in matched
                ],
                # Entity states that did not match the query
                "unmatched": [
                    template.TemplateState(self.hass, state)
                    for state in intent_response.unmatched_states
                    template.TemplateState(self.hass, state) for state in unmatched
                ],
            },
        }
@@ -467,6 +479,12 @@ class DefaultAgent(AbstractConversationAgent):
        for state in states:
            # Checked against "requires_context" and "excludes_context" in hassil
            context = {"domain": state.domain}
            if state.attributes:
                # Include some attributes
                for attr_key, attr_value in state.attributes.items():
                    if attr_key not in DEFAULT_EXPOSED_ATTRIBUTES:
                        continue
                    context[attr_key] = attr_value

            entity = entities.async_get(state.entity_id)
            if entity is not None:
@@ -506,6 +524,9 @@ class DefaultAgent(AbstractConversationAgent):
            for alias in area.aliases:
                area_names.append((alias, area.id))

        _LOGGER.debug("Exposed areas: %s", area_names)
        _LOGGER.debug("Exposed entities: %s", entity_names)

        self._slot_lists = {
            "area": TextSlotList.from_tuples(area_names, allow_template=False),
            "name": TextSlotList.from_tuples(entity_names, allow_template=False),
@@ -8,6 +8,7 @@ import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import ConditionProtocol, trace_condition_function
from homeassistant.helpers.typing import ConfigType

from . import DeviceAutomationType, async_get_device_automation_platform
@@ -17,24 +18,13 @@ if TYPE_CHECKING:
    from homeassistant.helpers import condition


class DeviceAutomationConditionProtocol(Protocol):
class DeviceAutomationConditionProtocol(ConditionProtocol, Protocol):
    """Define the format of device_condition modules.

    Each module must define either CONDITION_SCHEMA or async_validate_condition_config.
    Each module must define either CONDITION_SCHEMA or async_validate_condition_config
    from ConditionProtocol.
    """

    CONDITION_SCHEMA: vol.Schema

    async def async_validate_condition_config(
        self, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""

    def async_condition_from_config(
        self, hass: HomeAssistant, config: ConfigType
    ) -> condition.ConditionCheckerType:
        """Evaluate state based on configuration."""

    async def async_get_condition_capabilities(
        self, hass: HomeAssistant, config: ConfigType
    ) -> dict[str, vol.Schema]:
@@ -62,4 +52,4 @@ async def async_condition_from_config(
    platform = await async_get_device_automation_platform(
        hass, config[CONF_DOMAIN], DeviceAutomationType.CONDITION
    )
    return platform.async_condition_from_config(hass, config)
    return trace_condition_function(platform.async_condition_from_config(hass, config))
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/environment_canada",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
  "requirements": ["env_canada==0.5.28"]
  "requirements": ["env_canada==0.5.29"]
}
@@ -14,6 +14,6 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioesphomeapi", "noiseprotocol"],
  "requirements": ["aioesphomeapi==13.4.0", "esphome-dashboard-api==1.2.3"],
  "requirements": ["aioesphomeapi==13.4.1", "esphome-dashboard-api==1.2.3"],
  "zeroconf": ["_esphomelib._tcp.local."]
}
@@ -87,14 +87,23 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        hass, STARTUP_SCAN_TIMEOUT
    )

    @callback
    def _async_start_background_discovery(*_: Any) -> None:
        """Run discovery in the background."""
        hass.async_create_background_task(_async_discovery(), "flux_led-discovery")

    async def _async_discovery(*_: Any) -> None:
        async_trigger_discovery(
            hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
        )

    async_trigger_discovery(hass, domain_data[FLUX_LED_DISCOVERY])
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_discovery)
    async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
    )
    async_track_time_interval(
        hass, _async_start_background_discovery, DISCOVERY_INTERVAL
    )
    return True


@@ -341,6 +341,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    is_dev = repo_path is not None
    root_path = _frontend_root(repo_path)

    if is_dev:
        from .dev import async_setup_frontend_dev

        async_setup_frontend_dev(hass)

    for path, should_cache in (
        ("service_worker.js", False),
        ("robots.txt", False),
homeassistant/components/frontend/dev.py (new file, 60 lines)
@@ -0,0 +1,60 @@
"""Development helpers for the frontend."""
import aiohttp
from aiohttp import hdrs, web

from homeassistant.components.http.view import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import aiohttp_client


@callback
def async_setup_frontend_dev(hass: HomeAssistant) -> None:
    """Set up frontend dev views."""
    hass.http.register_view(  # type: ignore
        FrontendDevView(
            "http://localhost:8000", aiohttp_client.async_get_clientsession(hass)
        )
    )


FILTER_RESPONSE_HEADERS = {hdrs.CONTENT_LENGTH, hdrs.CONTENT_ENCODING}


class FrontendDevView(HomeAssistantView):
    """Frontend dev view."""

    name = "_dev:frontend"
    url = "/_dev_frontend/{path:.*}"
    requires_auth = False
    extra_urls = ["/__web-dev-server__/{path:.*}"]

    def __init__(self, forward_base: str, websession: aiohttp.ClientSession):
        """Initialize a Hass.io ingress view."""
        self._forward_base = forward_base
        self._websession = websession

    async def get(self, request: web.Request, path: str) -> web.Response:
        """Frontend routing."""
        # To deal with: import * as commonjsHelpers from '/__web-dev-server__/rollup/commonjsHelpers.js
        if request.path.startswith("/__web-dev-server__/"):
            path = f"__web-dev-server__/{path}"

        url = f"{self._forward_base}/{path}"

        if request.query_string:
            url += f"?{request.query_string}"

        async with self._websession.get(
            url,
            headers=request.headers,
            allow_redirects=False,
        ) as result:
            return web.Response(
                headers={
                    hdr: val
                    for hdr, val in result.headers.items()
                    if hdr not in FILTER_RESPONSE_HEADERS
                },
                status=result.status,
                body=await result.read(),
            )
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20230222.0"]
  "requirements": ["home-assistant-frontend==20230227.0"]
}
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/garages_amsterdam",
  "iot_class": "cloud_polling",
  "requirements": ["odp-amsterdam==5.0.1"]
  "requirements": ["odp-amsterdam==5.1.0"]
}
@@ -1,7 +1,6 @@
{
  "domain": "hassio",
  "name": "Home Assistant Supervisor",
  "after_dependencies": ["panel_custom"],
  "codeowners": ["@home-assistant/supervisor"],
  "dependencies": ["http"],
  "documentation": "https://www.home-assistant.io/integrations/hassio",
@@ -1,6 +1,7 @@
"""Config flow for HLK-SW16."""
import asyncio

import async_timeout
from hlk_sw16 import create_hlk_sw16_connection
import voluptuous as vol

@@ -35,7 +36,8 @@ async def connect_client(hass, user_input):
        reconnect_interval=DEFAULT_RECONNECT_INTERVAL,
        keep_alive_interval=DEFAULT_KEEP_ALIVE_INTERVAL,
    )
    return await asyncio.wait_for(client_aw, timeout=CONNECTION_TIMEOUT)
    async with async_timeout.timeout(CONNECTION_TIMEOUT):
        return await client_aw


async def validate_input(hass: HomeAssistant, user_input):
@@ -14,6 +14,7 @@ PLATFORMS = [
    Platform.CLIMATE,
    Platform.COVER,
    Platform.LIGHT,
    Platform.LOCK,
    Platform.SENSOR,
    Platform.SWITCH,
    Platform.WEATHER,
homeassistant/components/homematicip_cloud/helpers.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""Helper functions for Homematicip Cloud Integration."""

from functools import wraps
import json
import logging

from homeassistant.exceptions import HomeAssistantError

from . import HomematicipGenericEntity

_LOGGER = logging.getLogger(__name__)


def is_error_response(response) -> bool:
    """Response from async call contains errors or not."""
    if isinstance(response, dict):
        return response.get("errorCode") not in ("", None)

    return False


def handle_errors(func):
    """Handle async errors."""

    @wraps(func)
    async def inner(self: HomematicipGenericEntity) -> None:
        """Handle errors from async call."""
        result = await func(self)
        if is_error_response(result):
            _LOGGER.error(
                "Error while execute function %s: %s",
                __name__,
                json.dumps(result),
            )
            raise HomeAssistantError(
                f"Error while execute function {func.__name__}: {result.get('errorCode')}. See log for more information."
            )

    return inner
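For context, `handle_errors` follows a common decorator pattern: await the wrapped coroutine, inspect its result payload, and convert an error code into a raised exception. The standalone sketch below only illustrates that pattern; the `FakeDevice` class and its error shape are invented for illustration and are not part of the integration or the HomematicIP API.

```python
import asyncio
from functools import wraps


def handle_errors(func):
    """Turn an error payload returned by an async call into an exception."""

    @wraps(func)
    async def inner(*args, **kwargs):
        result = await func(*args, **kwargs)
        if isinstance(result, dict) and result.get("errorCode") not in ("", None):
            raise RuntimeError(f"{func.__name__} failed: {result['errorCode']}")
        return result

    return inner


class FakeDevice:
    @handle_errors
    async def set_lock_state(self, state):
        # Simulate a device API that reports errors in the payload instead of raising.
        return {"errorCode": "INVALID_DEVICE"} if state == "OPEN" else {"errorCode": ""}


async def main():
    device = FakeDevice()
    await device.set_lock_state("LOCKED")  # succeeds silently
    try:
        await device.set_lock_state("OPEN")  # error payload becomes an exception
    except RuntimeError as err:
        print(err)


asyncio.run(main())
```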
homeassistant/components/homematicip_cloud/lock.py (new file, 95 lines)
@@ -0,0 +1,95 @@
"""Support for HomematicIP Cloud lock devices."""
from __future__ import annotations

import logging
from typing import Any

from homematicip.aio.device import AsyncDoorLockDrive
from homematicip.base.enums import LockState, MotorState

from homeassistant.components.lock import LockEntity, LockEntityFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity
from .helpers import handle_errors

_LOGGER = logging.getLogger(__name__)

ATTR_AUTO_RELOCK_DELAY = "auto_relock_delay"
ATTR_DOOR_HANDLE_TYPE = "door_handle_type"
ATTR_DOOR_LOCK_DIRECTION = "door_lock_direction"
ATTR_DOOR_LOCK_NEUTRAL_POSITION = "door_lock_neutral_position"
ATTR_DOOR_LOCK_TURNS = "door_lock_turns"

DEVICE_DLD_ATTRIBUTES = {
    "autoRelockDelay": ATTR_AUTO_RELOCK_DELAY,
    "doorHandleType": ATTR_DOOR_HANDLE_TYPE,
    "doorLockDirection": ATTR_DOOR_LOCK_DIRECTION,
    "doorLockNeutralPosition": ATTR_DOOR_LOCK_NEUTRAL_POSITION,
    "doorLockTurns": ATTR_DOOR_LOCK_TURNS,
}


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the HomematicIP locks from a config entry."""
    hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id]

    async_add_entities(
        HomematicipDoorLockDrive(hap, device)
        for device in hap.home.devices
        if isinstance(device, AsyncDoorLockDrive)
    )


class HomematicipDoorLockDrive(HomematicipGenericEntity, LockEntity):
    """Representation of the HomematicIP DoorLockDrive."""

    _attr_supported_features = LockEntityFeature.OPEN

    @property
    def is_locked(self) -> bool | None:
        """Return true if device is locked."""
        return (
            self._device.lockState == LockState.LOCKED
            and self._device.motorState == MotorState.STOPPED
        )

    @property
    def is_locking(self) -> bool:
        """Return true if device is locking."""
        return self._device.motorState == MotorState.CLOSING

    @property
    def is_unlocking(self) -> bool:
        """Return true if device is unlocking."""
        return self._device.motorState == MotorState.OPENING

    @handle_errors
    async def async_lock(self, **kwargs: Any) -> None:
        """Lock the device."""
        return await self._device.set_lock_state(LockState.LOCKED)

    @handle_errors
    async def async_unlock(self, **kwargs: Any) -> None:
        """Unlock the device."""
        return await self._device.set_lock_state(LockState.UNLOCKED)

    @handle_errors
    async def async_open(self, **kwargs: Any) -> None:
        """Open the door latch."""
        return await self._device.set_lock_state(LockState.OPEN)

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return the state attributes of the device."""
        return super().extra_state_attributes | {
            attr_key: attr_value
            for attr, attr_key in DEVICE_DLD_ATTRIBUTES.items()
            if (attr_value := getattr(self._device, attr, None)) is not None
        }
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/honeywell",
  "iot_class": "cloud_polling",
  "loggers": ["somecomfort"],
  "requirements": ["aiosomecomfort==0.0.8"]
  "requirements": ["aiosomecomfort==0.0.10"]
}
@@ -7,6 +7,13 @@
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",
        "description": "The Honeywell integration needs to re-authenticate your account",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        }
      }
    },
    "error": {
@@ -1,22 +1,13 @@
"""The islamic_prayer_times component."""
from datetime import timedelta
import logging

from prayer_times_calculator import PrayerTimesCalculator, exceptions
from requests.exceptions import ConnectionError as ConnError
from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
import homeassistant.util.dt as dt_util

from .const import CONF_CALC_METHOD, DATA_UPDATED, DEFAULT_CALC_METHOD, DOMAIN

_LOGGER = logging.getLogger(__name__)
from .const import DOMAIN
from .coordinator import IslamicPrayerDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]

@@ -25,154 +16,32 @@ CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)

async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up the Islamic Prayer Component."""
    client = IslamicPrayerClient(hass, config_entry)
    hass.data[DOMAIN] = client
    await client.async_setup()
    coordinator = IslamicPrayerDataUpdateCoordinator(hass)
    await coordinator.async_config_entry_first_refresh()

    hass.data.setdefault(DOMAIN, coordinator)
    config_entry.async_on_unload(
        config_entry.add_update_listener(async_options_updated)
    )
    hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Unload Islamic Prayer entry from config_entry."""
    if hass.data[DOMAIN].event_unsub:
        hass.data[DOMAIN].event_unsub()
    hass.data.pop(DOMAIN)
    return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
    if unload_ok := await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    ):
        coordinator: IslamicPrayerDataUpdateCoordinator = hass.data.pop(DOMAIN)
        if coordinator.event_unsub:
            coordinator.event_unsub()
    return unload_ok


class IslamicPrayerClient:
    """Islamic Prayer Client Object."""

    def __init__(self, hass, config_entry):
        """Initialize the Islamic Prayer client."""
        self.hass = hass
        self.config_entry = config_entry
        self.prayer_times_info = {}
        self.available = True
        self.event_unsub = None

    @property
    def calc_method(self):
        """Return the calculation method."""
        return self.config_entry.options[CONF_CALC_METHOD]

    def get_new_prayer_times(self):
        """Fetch prayer times for today."""
        calc = PrayerTimesCalculator(
            latitude=self.hass.config.latitude,
            longitude=self.hass.config.longitude,
            calculation_method=self.calc_method,
            date=str(dt_util.now().date()),
        )
        return calc.fetch_prayer_times()

    async def async_schedule_future_update(self):
        """Schedule future update for sensors.

        Midnight is a calculated time. The specifics of the calculation
        depends on the method of the prayer time calculation. This calculated
        midnight is the time at which the time to pray the Isha prayers have
        expired.

        Calculated Midnight: The Islamic midnight.
        Traditional Midnight: 12:00AM

        Update logic for prayer times:

        If the Calculated Midnight is before the traditional midnight then wait
        until the traditional midnight to run the update. This way the day
        will have changed over and we don't need to do any fancy calculations.

        If the Calculated Midnight is after the traditional midnight, then wait
        until after the calculated Midnight. We don't want to update the prayer
        times too early or else the timings might be incorrect.

        Example:
        calculated midnight = 11:23PM (before traditional midnight)
        Update time: 12:00AM

        calculated midnight = 1:35AM (after traditional midnight)
        update time: 1:36AM.

        """
        _LOGGER.debug("Scheduling next update for Islamic prayer times")

        now = dt_util.utcnow()

        midnight_dt = self.prayer_times_info["Midnight"]

        if now > dt_util.as_utc(midnight_dt):
            next_update_at = midnight_dt + timedelta(days=1, minutes=1)
            _LOGGER.debug(
                "Midnight is after day the changes so schedule update for after"
                " Midnight the next day"
            )
        else:
            _LOGGER.debug(
                "Midnight is before the day changes so schedule update for the next"
                " start of day"
            )
            next_update_at = dt_util.start_of_local_day(now + timedelta(days=1))

        _LOGGER.info("Next update scheduled for: %s", next_update_at)

        self.event_unsub = async_track_point_in_time(
            self.hass, self.async_update, next_update_at
        )

    async def async_update(self, *_):
        """Update sensors with new prayer times."""
        try:
            prayer_times = await self.hass.async_add_executor_job(
                self.get_new_prayer_times
            )
            self.available = True
        except (exceptions.InvalidResponseError, ConnError):
            self.available = False
            _LOGGER.debug("Error retrieving prayer times")
            async_call_later(self.hass, 60, self.async_update)
            return

        for prayer, time in prayer_times.items():
            self.prayer_times_info[prayer] = dt_util.parse_datetime(
                f"{dt_util.now().date()} {time}"
            )
        await self.async_schedule_future_update()

        _LOGGER.debug("New prayer times retrieved. Updating sensors")
        async_dispatcher_send(self.hass, DATA_UPDATED)

    async def async_setup(self):
        """Set up the Islamic prayer client."""
        await self.async_add_options()

        try:
            await self.hass.async_add_executor_job(self.get_new_prayer_times)
        except (exceptions.InvalidResponseError, ConnError) as err:
            raise ConfigEntryNotReady from err

        await self.async_update()
        self.config_entry.add_update_listener(self.async_options_updated)

        await self.hass.config_entries.async_forward_entry_setups(
            self.config_entry, PLATFORMS
        )

        return True

    async def async_add_options(self):
        """Add options for entry."""
        if not self.config_entry.options:
            data = dict(self.config_entry.data)
            calc_method = data.pop(CONF_CALC_METHOD, DEFAULT_CALC_METHOD)

            self.hass.config_entries.async_update_entry(
                self.config_entry, data=data, options={CONF_CALC_METHOD: calc_method}
            )

    @staticmethod
    async def async_options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Triggered by config entry options updates."""
        if hass.data[DOMAIN].event_unsub:
            hass.data[DOMAIN].event_unsub()
        await hass.data[DOMAIN].async_update()
async def async_options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Triggered by config entry options updates."""
    coordinator: IslamicPrayerDataUpdateCoordinator = hass.data[DOMAIN]
    if coordinator.event_unsub:
        coordinator.event_unsub()
    await coordinator.async_request_refresh()
@@ -1,10 +1,13 @@
"""Config flow for Islamic Prayer Times integration."""
from __future__ import annotations

from typing import Any

import voluptuous as vol

from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult

from .const import CALC_METHODS, CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN, NAME

@@ -22,7 +25,9 @@ class IslamicPrayerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
        """Get the options flow for this handler."""
        return IslamicPrayerOptionsFlowHandler(config_entry)

    async def async_step_user(self, user_input=None):
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle a flow initialized by the user."""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
@@ -40,7 +45,9 @@ class IslamicPrayerOptionsFlowHandler(config_entries.OptionsFlow):
        """Initialize options flow."""
        self.config_entry = config_entry

    async def async_step_init(self, user_input=None):
    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Manage options."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
@@ -1,23 +1,12 @@
"""Constants for the Islamic Prayer component."""
from typing import Final

from prayer_times_calculator import PrayerTimesCalculator

DOMAIN = "islamic_prayer_times"
NAME = "Islamic Prayer Times"
PRAYER_TIMES_ICON = "mdi:calendar-clock"
DOMAIN: Final = "islamic_prayer_times"
NAME: Final = "Islamic Prayer Times"

SENSOR_TYPES = {
    "Fajr": "prayer",
    "Sunrise": "time",
    "Dhuhr": "prayer",
    "Asr": "prayer",
    "Maghrib": "prayer",
    "Isha": "prayer",
    "Midnight": "time",
}

CONF_CALC_METHOD = "calculation_method"
CONF_CALC_METHOD: Final = "calculation_method"

CALC_METHODS: list[str] = list(PrayerTimesCalculator.CALCULATION_METHODS)
DEFAULT_CALC_METHOD = "isna"

DATA_UPDATED = "Islamic_prayer_data_updated"
DEFAULT_CALC_METHOD: Final = "isna"
homeassistant/components/islamic_prayer_times/coordinator.py (new file, 121 lines)
@@ -0,0 +1,121 @@
"""Coordinator for the Islamic prayer times integration."""
from __future__ import annotations

from datetime import datetime, timedelta
import logging

from prayer_times_calculator import PrayerTimesCalculator, exceptions
from requests.exceptions import ConnectionError as ConnError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
import homeassistant.util.dt as dt_util

from .const import CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN

_LOGGER = logging.getLogger(__name__)


class IslamicPrayerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, datetime]]):
    """Islamic Prayer Client Object."""

    config_entry: ConfigEntry

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the Islamic Prayer client."""
        self.event_unsub: CALLBACK_TYPE | None = None
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
        )

    @property
    def calc_method(self) -> str:
        """Return the calculation method."""
        return self.config_entry.options.get(CONF_CALC_METHOD, DEFAULT_CALC_METHOD)

    def get_new_prayer_times(self) -> dict[str, str]:
        """Fetch prayer times for today."""
        calc = PrayerTimesCalculator(
            latitude=self.hass.config.latitude,
            longitude=self.hass.config.longitude,
            calculation_method=self.calc_method,
            date=str(dt_util.now().date()),
        )
        return calc.fetch_prayer_times()

    @callback
    def async_schedule_future_update(self, midnight_dt: datetime) -> None:
        """Schedule future update for sensors.

        Midnight is a calculated time. The specifics of the calculation
        depends on the method of the prayer time calculation. This calculated
        midnight is the time at which the time to pray the Isha prayers have
        expired.

        Calculated Midnight: The Islamic midnight.
        Traditional Midnight: 12:00AM

        Update logic for prayer times:

        If the Calculated Midnight is before the traditional midnight then wait
        until the traditional midnight to run the update. This way the day
        will have changed over and we don't need to do any fancy calculations.

        If the Calculated Midnight is after the traditional midnight, then wait
        until after the calculated Midnight. We don't want to update the prayer
        times too early or else the timings might be incorrect.

        Example:
        calculated midnight = 11:23PM (before traditional midnight)
        Update time: 12:00AM

        calculated midnight = 1:35AM (after traditional midnight)
        update time: 1:36AM.

        """
        _LOGGER.debug("Scheduling next update for Islamic prayer times")

        now = dt_util.utcnow()

        if now > midnight_dt:
            next_update_at = midnight_dt + timedelta(days=1, minutes=1)
            _LOGGER.debug(
                "Midnight is after the day changes so schedule update for after Midnight the next day"
            )
        else:
            _LOGGER.debug(
                "Midnight is before the day changes so schedule update for the next start of day"
            )
            next_update_at = dt_util.start_of_local_day(now + timedelta(days=1))

        _LOGGER.debug("Next update scheduled for: %s", next_update_at)

        self.event_unsub = async_track_point_in_time(
            self.hass, self.async_request_update, next_update_at
        )

    async def async_request_update(self, *_) -> None:
        """Request update from coordinator."""
        await self.async_request_refresh()

    async def _async_update_data(self) -> dict[str, datetime]:
        """Update sensors with new prayer times."""
        try:
            prayer_times = await self.hass.async_add_executor_job(
                self.get_new_prayer_times
            )
        except (exceptions.InvalidResponseError, ConnError) as err:
            async_call_later(self.hass, 60, self.async_request_update)
            raise UpdateFailed from err

        prayer_times_info: dict[str, datetime] = {}
        for prayer, time in prayer_times.items():
            if prayer_time := dt_util.parse_datetime(f"{dt_util.now().date()} {time}"):
                prayer_times_info[prayer] = dt_util.as_utc(prayer_time)

        self.async_schedule_future_update(prayer_times_info["Midnight"])
        return prayer_times_info
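The docstring above already explains the rescheduling rule; the standalone sketch below simply restates it with the two example times from that docstring. The `next_update` helper and the naive `replace(...)` stand-in for `dt_util.start_of_local_day` are illustrative only, not part of the integration.

```python
from datetime import datetime, timedelta


def next_update(now: datetime, calculated_midnight: datetime) -> datetime:
    """Restate the rule from async_schedule_future_update (illustrative only)."""
    if now > calculated_midnight:
        # Calculated (Islamic) midnight already passed: poll a minute after
        # tomorrow's calculated midnight.
        return calculated_midnight + timedelta(days=1, minutes=1)
    # Otherwise poll at the conventional start of the next day.
    return (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)


noon = datetime(2023, 3, 1, 12, 0)
print(next_update(noon, datetime(2023, 3, 1, 23, 23)))  # 2023-03-02 00:00:00
print(next_update(noon, datetime(2023, 3, 1, 1, 35)))   # 2023-03-02 01:36:00
```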
@@ -1,12 +1,51 @@
"""Platform to retrieve Islamic prayer times information for Home Assistant."""
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from datetime import datetime

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
import homeassistant.util.dt as dt_util
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DATA_UPDATED, DOMAIN, PRAYER_TIMES_ICON, SENSOR_TYPES
from . import IslamicPrayerDataUpdateCoordinator
from .const import DOMAIN, NAME

SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
        key="Fajr",
        name="Fajr prayer",
    ),
    SensorEntityDescription(
        key="Sunrise",
        name="Sunrise time",
    ),
    SensorEntityDescription(
        key="Dhuhr",
        name="Dhuhr prayer",
    ),
    SensorEntityDescription(
        key="Asr",
        name="Asr prayer",
    ),
    SensorEntityDescription(
        key="Maghrib",
        name="Maghrib prayer",
    ),
    SensorEntityDescription(
        key="Isha",
        name="Isha prayer",
    ),
    SensorEntityDescription(
        key="Midnight",
        name="Midnight time",
    ),
)


async def async_setup_entry(
@@ -16,46 +55,38 @@ async def async_setup_entry(
) -> None:
    """Set up the Islamic prayer times sensor platform."""

    client = hass.data[DOMAIN]
    coordinator: IslamicPrayerDataUpdateCoordinator = hass.data[DOMAIN]

    entities = []
    for sensor_type in SENSOR_TYPES:
        entities.append(IslamicPrayerTimeSensor(sensor_type, client))

    async_add_entities(entities, True)
    async_add_entities(
        IslamicPrayerTimeSensor(coordinator, description)
        for description in SENSOR_TYPES
    )


class IslamicPrayerTimeSensor(SensorEntity):
class IslamicPrayerTimeSensor(
    CoordinatorEntity[IslamicPrayerDataUpdateCoordinator], SensorEntity
):
    """Representation of an Islamic prayer time sensor."""

    _attr_device_class = SensorDeviceClass.TIMESTAMP
    _attr_icon = PRAYER_TIMES_ICON
    _attr_should_poll = False
    _attr_has_entity_name = True

    def __init__(self, sensor_type, client):
    def __init__(
        self,
        coordinator: IslamicPrayerDataUpdateCoordinator,
        description: SensorEntityDescription,
    ) -> None:
        """Initialize the Islamic prayer time sensor."""
        self.sensor_type = sensor_type
        self.client = client
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = description.key
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
            name=NAME,
            entry_type=DeviceEntryType.SERVICE,
        )

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self.sensor_type} {SENSOR_TYPES[self.sensor_type]}"

    @property
    def unique_id(self):
        """Return the unique id of the entity."""
        return self.sensor_type

    @property
    def native_value(self):
    def native_value(self) -> datetime:
        """Return the state of the sensor."""
        return self.client.prayer_times_info.get(self.sensor_type).astimezone(
            dt_util.UTC
        )

    async def async_added_to_hass(self) -> None:
        """Handle entity which will be added."""
        self.async_on_remove(
            async_dispatcher_connect(self.hass, DATA_UPDATED, self.async_write_ha_state)
        )
        return self.coordinator.data[self.entity_description.key]
@@ -8,16 +8,43 @@ from pyisy.constants import ISY_VALUE_UNKNOWN
from homeassistant.components.lock import LockEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity_platform import (
    AddEntitiesCallback,
    async_get_current_platform,
)

from .const import _LOGGER, DOMAIN
from .const import DOMAIN
from .entity import ISYNodeEntity, ISYProgramEntity
from .services import (
    SERVICE_DELETE_USER_CODE_SCHEMA,
    SERVICE_DELETE_ZWAVE_LOCK_USER_CODE,
    SERVICE_SET_USER_CODE_SCHEMA,
    SERVICE_SET_ZWAVE_LOCK_USER_CODE,
)

VALUE_TO_STATE = {0: False, 100: True}


@callback
def async_setup_lock_services(hass: HomeAssistant) -> None:
    """Create lock-specific services for the ISY Integration."""
    platform = async_get_current_platform()

    platform.async_register_entity_service(
        SERVICE_SET_ZWAVE_LOCK_USER_CODE,
        SERVICE_SET_USER_CODE_SCHEMA,
        "async_set_zwave_lock_user_code",
    )
    platform.async_register_entity_service(
        SERVICE_DELETE_ZWAVE_LOCK_USER_CODE,
        SERVICE_DELETE_USER_CODE_SCHEMA,
        "async_delete_zwave_lock_user_code",
    )


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
@@ -32,6 +59,7 @@ async def async_setup_entry(
        entities.append(ISYLockProgramEntity(name, status, actions))

    async_add_entities(entities)
    async_setup_lock_services(hass)


class ISYLockEntity(ISYNodeEntity, LockEntity):
@@ -47,12 +75,26 @@ class ISYLockEntity(ISYNodeEntity, LockEntity):
    async def async_lock(self, **kwargs: Any) -> None:
        """Send the lock command to the ISY device."""
        if not await self._node.secure_lock():
            _LOGGER.error("Unable to lock device")
            raise HomeAssistantError(f"Unable to lock device {self._node.address}")

    async def async_unlock(self, **kwargs: Any) -> None:
        """Send the unlock command to the ISY device."""
        if not await self._node.secure_unlock():
            _LOGGER.error("Unable to lock device")
            raise HomeAssistantError(f"Unable to unlock device {self._node.address}")

    async def async_set_zwave_lock_user_code(self, user_num: int, code: int) -> None:
        """Set a user lock code for a Z-Wave Lock."""
        if not await self._node.set_zwave_lock_code(user_num, code):
            raise HomeAssistantError(
                f"Could not set user code {user_num} for {self._node.address}"
            )

    async def async_delete_zwave_lock_user_code(self, user_num: int) -> None:
        """Delete a user lock code for a Z-Wave Lock."""
        if not await self._node.delete_zwave_lock_code(user_num):
            raise HomeAssistantError(
                f"Could not delete user code {user_num} for {self._node.address}"
            )


class ISYLockProgramEntity(ISYProgramEntity, LockEntity):
@@ -66,9 +108,9 @@ class ISYLockProgramEntity(ISYProgramEntity, LockEntity):
    async def async_lock(self, **kwargs: Any) -> None:
        """Lock the device."""
        if not await self._actions.run_then():
            _LOGGER.error("Unable to lock device")
            raise HomeAssistantError(f"Unable to lock device {self._node.address}")

    async def async_unlock(self, **kwargs: Any) -> None:
        """Unlock the device."""
        if not await self._actions.run_else():
            _LOGGER.error("Unable to unlock device")
            raise HomeAssistantError(f"Unable to unlock device {self._node.address}")
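For context on async_register_entity_service above: when the third argument is a method name string, the platform validates the call against the supplied schema and then awaits that method on every targeted lock entity, passing the validated fields as keyword arguments. A rough sketch of the resulting dispatch (simplified; the real plumbing lives in homeassistant.helpers.entity_platform):

# Simplified illustration, not the helper's actual implementation.
async def _dispatch_set_user_code(entity: ISYLockEntity, validated: dict) -> None:
    # "validated" has already passed SERVICE_SET_USER_CODE_SCHEMA
    await entity.async_set_zwave_lock_user_code(
        user_num=validated["user_num"], code=validated["code"]
    )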
@@ -24,7 +24,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["pyisy"],
  "requirements": ["pyisy==3.1.13"],
  "requirements": ["pyisy==3.1.14"],
  "ssdp": [
    {
      "manufacturer": "Universal Devices Inc.",
@@ -52,8 +52,14 @@ SERVICE_RENAME_NODE = "rename_node"
SERVICE_SET_ON_LEVEL = "set_on_level"
SERVICE_SET_RAMP_RATE = "set_ramp_rate"

# Services valid only for Z-Wave Locks
SERVICE_SET_ZWAVE_LOCK_USER_CODE = "set_zwave_lock_user_code"
SERVICE_DELETE_ZWAVE_LOCK_USER_CODE = "delete_zwave_lock_user_code"

CONF_PARAMETER = "parameter"
CONF_PARAMETERS = "parameters"
CONF_USER_NUM = "user_num"
CONF_CODE = "code"
CONF_VALUE = "value"
CONF_INIT = "init"
CONF_ISY = "isy"
@@ -129,6 +135,13 @@ SERVICE_SET_ZWAVE_PARAMETER_SCHEMA = {
    vol.Required(CONF_SIZE): vol.All(vol.Coerce(int), vol.In(VALID_PARAMETER_SIZES)),
}

SERVICE_SET_USER_CODE_SCHEMA = {
    vol.Required(CONF_USER_NUM): vol.Coerce(int),
    vol.Required(CONF_CODE): vol.Coerce(int),
}

SERVICE_DELETE_USER_CODE_SCHEMA = {vol.Required(CONF_USER_NUM): vol.Coerce(int)}

SERVICE_SET_VARIABLE_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_ADDRESS, CONF_TYPE, CONF_NAME),
    vol.Schema(
@@ -118,6 +118,52 @@ set_zwave_parameter:
          - "1"
          - "2"
          - "4"
set_zwave_lock_user_code:
  name: Set Z-Wave Lock User Code
  description: >-
    Set a Z-Wave Lock User Code via the ISY.
  target:
    entity:
      integration: isy994
      domain: lock
  fields:
    user_num:
      name: User Number
      description: The user slot number on the lock
      required: true
      example: 8
      selector:
        number:
          min: 1
          max: 255
    code:
      name: Code
      description: The code to set for the user.
      required: true
      example: 33491663
      selector:
        number:
          min: 1
          max: 99999999
          mode: box
delete_zwave_lock_user_code:
  name: Delete Z-Wave Lock User Code
  description: >-
    Delete a Z-Wave Lock User Code via the ISY.
  target:
    entity:
      integration: isy994
      domain: lock
  fields:
    user_num:
      name: User Number
      description: The user slot number on the lock
      required: true
      example: 8
      selector:
        number:
          min: 1
          max: 255
rename_node:
  name: Rename Node on ISY
  description: >-
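With the service descriptions above in place, the new lock services can be invoked like any other entity service. An illustrative call from Python (the entity_id is hypothetical):

# Illustrative only: program user slot 8 on a hypothetical ISY Z-Wave lock.
await hass.services.async_call(
    "isy994",
    "set_zwave_lock_user_code",
    {"user_num": 8, "code": 33491663},
    target={"entity_id": "lock.front_door"},
    blocking=True,
)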
@@ -9,5 +9,5 @@
  "iot_class": "local_push",
  "loggers": ["xknx"],
  "quality_scale": "platinum",
  "requirements": ["xknx==2.5.0"]
  "requirements": ["xknx==2.6.0"]
}
@@ -17,10 +17,9 @@ from homeassistant.const import (
    CONF_HOST,
    CONF_PORT,
    EVENT_HOMEASSISTANT_STARTED,
    EVENT_HOMEASSISTANT_STOP,
    Platform,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
@@ -167,15 +166,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

        We do not want the discovery task to block startup.
        """
        task = asyncio.create_task(discovery_manager.async_discovery())

        @callback
        def _async_stop(_: Event) -> None:
            if not task.done():
                task.cancel()

        # Task must be shut down when home assistant is closing
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop)
        hass.async_create_background_task(
            discovery_manager.async_discovery(), "lifx-discovery"
        )

    # Let the system settle a bit before starting discovery
    # to reduce the risk we miss devices because the event
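A condensed view of the change above, for clarity: tasks created through hass.async_create_background_task are tracked by Home Assistant and cancelled automatically at shutdown, which is why the manual EVENT_HOMEASSISTANT_STOP listener can be dropped.

# Before: create the task manually and cancel it via an EVENT_HOMEASSISTANT_STOP listener.
task = asyncio.create_task(discovery_manager.async_discovery())

# After: the named background task is tracked and cancelled by Home Assistant at shutdown.
hass.async_create_background_task(discovery_manager.async_discovery(), "lifx-discovery")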
@@ -6,7 +6,7 @@ import voluptuous as vol

from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_PORT, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
@@ -63,7 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

    system.on_connected_changed(handle_connected_changed)

    async def handle_stop(event) -> None:
    async def handle_stop(event: Event) -> None:
        await system.close()

    entry.async_on_unload(

@@ -76,7 +76,7 @@ class LiteJetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
            errors=errors,
        )

    async def async_step_import(self, import_data):
    async def async_step_import(self, import_data: dict[str, Any]) -> FlowResult:
        """Import litejet config from configuration.yaml."""
        return self.async_create_entry(title=import_data[CONF_PORT], data=import_data)
@@ -2,6 +2,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
|
||||
from pylitejet import LiteJet
|
||||
import voluptuous as vol
|
||||
@@ -42,7 +44,7 @@ async def async_attach_trigger(
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Listen for events based on configuration."""
|
||||
trigger_data = trigger_info["trigger_data"]
|
||||
number = config.get(CONF_NUMBER)
|
||||
number = cast(int, config[CONF_NUMBER])
|
||||
held_more_than = config.get(CONF_HELD_MORE_THAN)
|
||||
held_less_than = config.get(CONF_HELD_LESS_THAN)
|
||||
pressed_time = None
|
||||
@@ -50,7 +52,7 @@ async def async_attach_trigger(
|
||||
job = HassJob(action)
|
||||
|
||||
@callback
|
||||
def call_action():
|
||||
def call_action() -> None:
|
||||
"""Call action with right context."""
|
||||
hass.async_run_hass_job(
|
||||
job,
|
||||
@@ -72,11 +74,11 @@ async def async_attach_trigger(
|
||||
# neither: trigger on pressed
|
||||
|
||||
@callback
|
||||
def pressed_more_than_satisfied(now):
|
||||
def pressed_more_than_satisfied(now: datetime) -> None:
|
||||
"""Handle the LiteJet's switch's button pressed >= held_more_than."""
|
||||
call_action()
|
||||
|
||||
def pressed():
|
||||
def pressed() -> None:
|
||||
"""Handle the press of the LiteJet switch's button."""
|
||||
nonlocal cancel_pressed_more_than, pressed_time
|
||||
nonlocal held_less_than, held_more_than
|
||||
@@ -88,10 +90,12 @@ async def async_attach_trigger(
|
||||
hass, pressed_more_than_satisfied, dt_util.utcnow() + held_more_than
|
||||
)
|
||||
|
||||
def released():
|
||||
def released() -> None:
|
||||
"""Handle the release of the LiteJet switch's button."""
|
||||
nonlocal cancel_pressed_more_than, pressed_time
|
||||
nonlocal held_less_than, held_more_than
|
||||
if pressed_time is None:
|
||||
return
|
||||
if cancel_pressed_more_than is not None:
|
||||
cancel_pressed_more_than()
|
||||
cancel_pressed_more_than = None
|
||||
@@ -110,7 +114,7 @@ async def async_attach_trigger(
|
||||
system.on_switch_released(number, released)
|
||||
|
||||
@callback
|
||||
def async_remove():
|
||||
def async_remove() -> None:
|
||||
"""Remove all subscriptions used for this trigger."""
|
||||
system.unsubscribe(pressed)
|
||||
system.unsubscribe(released)
|
||||
|
@@ -8,14 +8,15 @@ from aiolivisi import AioLivisi

from homeassistant import core
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, device_registry as dr

from .const import DOMAIN, SWITCH_PLATFORM
from .const import DOMAIN
from .coordinator import LivisiDataUpdateCoordinator

PLATFORMS: Final = [SWITCH_PLATFORM]
PLATFORMS: Final = [Platform.CLIMATE, Platform.SWITCH]


async def async_setup_entry(hass: core.HomeAssistant, entry: ConfigEntry) -> bool:
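The body of async_setup_entry is not shown in this hunk; presumably it stores the coordinator and forwards the entry to the platforms listed above. A sketch under that assumption:

# Sketch only - the surrounding setup code is outside this hunk.
# The climate platform added in this changeset expects the coordinator under
# hass.data[DOMAIN][entry.entry_id], then the entry is forwarded to both platforms.
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)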
homeassistant/components/livisi/climate.py (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
"""Code to handle a Livisi Virtual Climate Control."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from aiolivisi.const import CAPABILITY_MAP
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
LIVISI_REACHABILITY_CHANGE,
|
||||
LIVISI_STATE_CHANGE,
|
||||
LOGGER,
|
||||
MAX_TEMPERATURE,
|
||||
MIN_TEMPERATURE,
|
||||
VRCC_DEVICE_TYPE,
|
||||
)
|
||||
from .coordinator import LivisiDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up climate device."""
|
||||
coordinator: LivisiDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
@callback
|
||||
def handle_coordinator_update() -> None:
|
||||
"""Add climate device."""
|
||||
shc_devices: list[dict[str, Any]] = coordinator.data
|
||||
entities: list[ClimateEntity] = []
|
||||
for device in shc_devices:
|
||||
if (
|
||||
device["type"] == VRCC_DEVICE_TYPE
|
||||
and device["id"] not in coordinator.devices
|
||||
):
|
||||
livisi_climate: ClimateEntity = create_entity(
|
||||
config_entry, device, coordinator
|
||||
)
|
||||
LOGGER.debug("Include device type: %s", device.get("type"))
|
||||
coordinator.devices.add(device["id"])
|
||||
entities.append(livisi_climate)
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
coordinator.async_add_listener(handle_coordinator_update)
|
||||
)
|
||||
|
||||
|
||||
def create_entity(
|
||||
config_entry: ConfigEntry,
|
||||
device: dict[str, Any],
|
||||
coordinator: LivisiDataUpdateCoordinator,
|
||||
) -> ClimateEntity:
|
||||
"""Create Climate Entity."""
|
||||
capabilities: Mapping[str, Any] = device[CAPABILITY_MAP]
|
||||
room_id: str = device["location"]
|
||||
room_name: str = coordinator.rooms[room_id]
|
||||
livisi_climate = LivisiClimate(
|
||||
config_entry,
|
||||
coordinator,
|
||||
unique_id=device["id"],
|
||||
manufacturer=device["manufacturer"],
|
||||
device_type=device["type"],
|
||||
target_temperature_capability=capabilities["RoomSetpoint"],
|
||||
temperature_capability=capabilities["RoomTemperature"],
|
||||
humidity_capability=capabilities["RoomHumidity"],
|
||||
room=room_name,
|
||||
)
|
||||
return livisi_climate
|
||||
|
||||
|
||||
class LivisiClimate(CoordinatorEntity[LivisiDataUpdateCoordinator], ClimateEntity):
|
||||
"""Represents the Livisi Climate."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.HEAT]
|
||||
_attr_hvac_mode = HVACMode.HEAT
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
_attr_target_temperature_high = MAX_TEMPERATURE
|
||||
_attr_target_temperature_low = MIN_TEMPERATURE
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: ConfigEntry,
|
||||
coordinator: LivisiDataUpdateCoordinator,
|
||||
unique_id: str,
|
||||
manufacturer: str,
|
||||
device_type: str,
|
||||
target_temperature_capability: str,
|
||||
temperature_capability: str,
|
||||
humidity_capability: str,
|
||||
room: str,
|
||||
) -> None:
|
||||
"""Initialize the Livisi Climate."""
|
||||
self.config_entry = config_entry
|
||||
self._attr_unique_id = unique_id
|
||||
self._target_temperature_capability = target_temperature_capability
|
||||
self._temperature_capability = temperature_capability
|
||||
self._humidity_capability = humidity_capability
|
||||
self.aio_livisi = coordinator.aiolivisi
|
||||
self._attr_available = False
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
manufacturer=manufacturer,
|
||||
model=device_type,
|
||||
name=room,
|
||||
suggested_area=room,
|
||||
via_device=(DOMAIN, config_entry.entry_id),
|
||||
)
|
||||
super().__init__(coordinator)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
response = await self.aio_livisi.async_vrcc_set_temperature(
|
||||
self._target_temperature_capability,
|
||||
kwargs.get(ATTR_TEMPERATURE),
|
||||
self.coordinator.is_avatar,
|
||||
)
|
||||
if response is None:
|
||||
self._attr_available = False
|
||||
raise HomeAssistantError(f"Failed to turn off {self._attr_name}")
|
||||
|
||||
def set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Do nothing as LIVISI devices do not support changing the hvac mode."""
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
target_temperature = await self.coordinator.async_get_vrcc_target_temperature(
|
||||
self._target_temperature_capability
|
||||
)
|
||||
temperature = await self.coordinator.async_get_vrcc_temperature(
|
||||
self._temperature_capability
|
||||
)
|
||||
humidity = await self.coordinator.async_get_vrcc_humidity(
|
||||
self._humidity_capability
|
||||
)
|
||||
if temperature is None:
|
||||
self._attr_current_temperature = None
|
||||
self._attr_available = False
|
||||
else:
|
||||
self._attr_target_temperature = target_temperature
|
||||
self._attr_current_temperature = temperature
|
||||
self._attr_current_humidity = humidity
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{LIVISI_STATE_CHANGE}_{self._target_temperature_capability}",
|
||||
self.update_target_temperature,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{LIVISI_STATE_CHANGE}_{self._temperature_capability}",
|
||||
self.update_temperature,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{LIVISI_STATE_CHANGE}_{self._humidity_capability}",
|
||||
self.update_humidity,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{LIVISI_REACHABILITY_CHANGE}_{self.unique_id}",
|
||||
self.update_reachability,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def update_target_temperature(self, target_temperature: float) -> None:
|
||||
"""Update the target temperature of the climate device."""
|
||||
self._attr_target_temperature = target_temperature
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def update_temperature(self, current_temperature: float) -> None:
|
||||
"""Update the current temperature of the climate device."""
|
||||
self._attr_current_temperature = current_temperature
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def update_humidity(self, humidity: int) -> None:
|
||||
"""Update the humidity temperature of the climate device."""
|
||||
self._attr_current_humidity = humidity
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def update_reachability(self, is_reachable: bool) -> None:
|
||||
"""Update the reachability of the climate device."""
|
||||
self._attr_available = is_reachable
|
||||
self.async_write_ha_state()
|
@@ -7,12 +7,15 @@ DOMAIN = "livisi"

CONF_HOST = "host"
CONF_PASSWORD: Final = "password"
AVATAR = "Avatar"
AVATAR_PORT: Final = 9090
CLASSIC_PORT: Final = 8080
DEVICE_POLLING_DELAY: Final = 60
LIVISI_STATE_CHANGE: Final = "livisi_state_change"
LIVISI_REACHABILITY_CHANGE: Final = "livisi_reachability_change"

SWITCH_PLATFORM: Final = "switch"

PSS_DEVICE_TYPE: Final = "PSS"
VRCC_DEVICE_TYPE: Final = "VRCC"

MAX_TEMPERATURE: Final = 30.0
MIN_TEMPERATURE: Final = 6.0
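The two signal prefixes above are combined with a capability or device id at runtime, so entities can subscribe only to the updates they own; the coordinator change later in this diff dispatches them roughly like this (sketch):

# Sketch, mirroring the coordinator hunk later in this diff:
# one dispatcher signal per capability id, and a separate reachability
# signal per device id (the identifiers here are illustrative).
async_dispatcher_send(
    hass, f"{LIVISI_STATE_CHANGE}_{capability_id}", new_value
)
async_dispatcher_send(
    hass, f"{LIVISI_REACHABILITY_CHANGE}_{device_id}", is_reachable
)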
@@ -13,6 +13,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
AVATAR,
|
||||
AVATAR_PORT,
|
||||
CLASSIC_PORT,
|
||||
CONF_HOST,
|
||||
@@ -69,14 +70,14 @@ class LivisiDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]):
|
||||
livisi_connection_data=livisi_connection_data
|
||||
)
|
||||
controller_data = await self.aiolivisi.async_get_controller()
|
||||
if controller_data["controllerType"] == "Avatar":
|
||||
if (controller_type := controller_data["controllerType"]) == AVATAR:
|
||||
self.port = AVATAR_PORT
|
||||
self.is_avatar = True
|
||||
else:
|
||||
self.port = CLASSIC_PORT
|
||||
self.is_avatar = False
|
||||
self.controller_type = controller_type
|
||||
self.serial_number = controller_data["serialNumber"]
|
||||
self.controller_type = controller_data["controllerType"]
|
||||
|
||||
async def async_get_devices(self) -> list[dict[str, Any]]:
|
||||
"""Set the discovered devices list."""
|
||||
@@ -84,7 +85,7 @@ class LivisiDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]):
|
||||
|
||||
async def async_get_pss_state(self, capability: str) -> bool | None:
|
||||
"""Set the PSS state."""
|
||||
response: dict[str, Any] = await self.aiolivisi.async_get_device_state(
|
||||
response: dict[str, Any] | None = await self.aiolivisi.async_get_device_state(
|
||||
capability[1:]
|
||||
)
|
||||
if response is None:
|
||||
@@ -92,6 +93,35 @@ class LivisiDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]):
|
||||
on_state = response["onState"]
|
||||
return on_state["value"]
|
||||
|
||||
async def async_get_vrcc_target_temperature(self, capability: str) -> float | None:
|
||||
"""Get the target temperature of the climate device."""
|
||||
response: dict[str, Any] | None = await self.aiolivisi.async_get_device_state(
|
||||
capability[1:]
|
||||
)
|
||||
if response is None:
|
||||
return None
|
||||
if self.is_avatar:
|
||||
return response["setpointTemperature"]["value"]
|
||||
return response["pointTemperature"]["value"]
|
||||
|
||||
async def async_get_vrcc_temperature(self, capability: str) -> float | None:
|
||||
"""Get the temperature of the climate device."""
|
||||
response: dict[str, Any] | None = await self.aiolivisi.async_get_device_state(
|
||||
capability[1:]
|
||||
)
|
||||
if response is None:
|
||||
return None
|
||||
return response["temperature"]["value"]
|
||||
|
||||
async def async_get_vrcc_humidity(self, capability: str) -> int | None:
|
||||
"""Get the humidity of the climate device."""
|
||||
response: dict[str, Any] | None = await self.aiolivisi.async_get_device_state(
|
||||
capability[1:]
|
||||
)
|
||||
if response is None:
|
||||
return None
|
||||
return response["humidity"]["value"]
|
||||
|
||||
async def async_set_all_rooms(self) -> None:
|
||||
"""Set the room list."""
|
||||
response: list[dict[str, Any]] = await self.aiolivisi.async_get_all_rooms()
|
||||
@@ -108,6 +138,12 @@ class LivisiDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]):
|
||||
f"{LIVISI_STATE_CHANGE}_{event_data.source}",
|
||||
event_data.onState,
|
||||
)
|
||||
if event_data.vrccData is not None:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{LIVISI_STATE_CHANGE}_{event_data.source}",
|
||||
event_data.vrccData,
|
||||
)
|
||||
if event_data.isReachable is not None:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
|
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/livisi",
  "iot_class": "local_polling",
  "requirements": ["aiolivisi==0.0.15"]
  "requirements": ["aiolivisi==0.0.16"]
}
@@ -15,7 +15,9 @@ from pydantic import ValidationError
import voluptuous as vol

from homeassistant.components.calendar import (
    EVENT_END,
    EVENT_RRULE,
    EVENT_START,
    CalendarEntity,
    CalendarEntityFeature,
    CalendarEvent,
@@ -151,6 +153,21 @@ def _parse_event(event: dict[str, Any]) -> Event:
    """Parse an ical event from a home assistant event dictionary."""
    if rrule := event.get(EVENT_RRULE):
        event[EVENT_RRULE] = Recur.from_rrule(rrule)

    # This function is called with new events created in the local timezone,
    # however ical library does not properly return recurrence_ids for
    # start dates with a timezone. For now, ensure any datetime is stored as a
    # floating local time to ensure we still apply proper local timezone rules.
    # This can be removed when ical is updated with a new recurrence_id format
    # https://github.com/home-assistant/core/issues/87759
    for key in (EVENT_START, EVENT_END):
        if (
            (value := event[key])
            and isinstance(value, datetime)
            and value.tzinfo is not None
        ):
            event[key] = dt_util.as_local(value).replace(tzinfo=None)

    try:
        return Event.parse_obj(event)
    except ValidationError as err:
@@ -162,8 +179,12 @@ def _get_calendar_event(event: Event) -> CalendarEvent:
    """Return a CalendarEvent from an API event."""
    return CalendarEvent(
        summary=event.summary,
        start=event.start,
        end=event.end,
        start=dt_util.as_local(event.start)
        if isinstance(event.start, datetime)
        else event.start,
        end=dt_util.as_local(event.end)
        if isinstance(event.end, datetime)
        else event.end,
        description=event.description,
        uid=event.uid,
        rrule=event.rrule.as_rrule_str() if event.rrule else None,
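To make the floating-time workaround above concrete, here is a small worked example of the conversion applied to a timezone-aware start value (the values and timezone are illustrative):

# Example of the conversion done in _parse_event above.
from datetime import datetime, timezone
from homeassistant.util import dt as dt_util

aware = datetime(2023, 3, 6, 18, 0, tzinfo=timezone.utc)
floating = dt_util.as_local(aware).replace(tzinfo=None)
# With a configured Europe/Amsterdam timezone this yields 2023-03-06 19:00:00
# with tzinfo stripped, which the ical library treats as local ("floating") time.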
@@ -33,6 +33,7 @@ from homeassistant.helpers.config_validation import ( # noqa: F401
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.service import remove_entity_service_fields
from homeassistant.helpers.typing import ConfigType, StateType

_LOGGER = logging.getLogger(__name__)
@@ -92,7 +93,7 @@ async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None:
        raise ValueError(
            f"Code '{code}' for locking {entity.entity_id} doesn't match pattern {entity.code_format}"
        )
    await entity.async_lock(**service_call.data)
    await entity.async_lock(**remove_entity_service_fields(service_call))


async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -102,7 +103,7 @@ async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
        raise ValueError(
            f"Code '{code}' for unlocking {entity.entity_id} doesn't match pattern {entity.code_format}"
        )
    await entity.async_unlock(**service_call.data)
    await entity.async_unlock(**remove_entity_service_fields(service_call))


async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -112,7 +113,7 @@ async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
        raise ValueError(
            f"Code '{code}' for opening {entity.entity_id} doesn't match pattern {entity.code_format}"
        )
    await entity.async_open(**service_call.data)
    await entity.async_open(**remove_entity_service_fields(service_call))


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
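For clarity on the change above: remove_entity_service_fields strips the targeting keys (entity_id, device_id, area_id) from the service call, so only service-specific data such as the code reaches the entity method. A rough sketch of the effect (not the helper's actual implementation):

# Incoming call data might be {"entity_id": "lock.front_door", "code": "1234"}.
service_data = remove_entity_service_fields(service_call)
# service_data is now {"code": "1234"}, so LockEntity.async_lock(**service_data)
# no longer receives unexpected targeting kwargs.
await entity.async_lock(**service_data)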
@@ -27,7 +27,7 @@ from .adapter import MatterAdapter
|
||||
from .addon import get_addon_manager
|
||||
from .api import async_register_api
|
||||
from .const import CONF_INTEGRATION_CREATED_ADDON, CONF_USE_ADDON, DOMAIN, LOGGER
|
||||
from .device_platform import DEVICE_PLATFORM
|
||||
from .discovery import SUPPORTED_PLATFORMS
|
||||
from .helpers import MatterEntryData, get_matter, get_node_from_device_entry
|
||||
|
||||
CONNECT_TIMEOUT = 10
|
||||
@@ -101,12 +101,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
matter = MatterAdapter(hass, matter_client, entry)
|
||||
hass.data[DOMAIN][entry.entry_id] = MatterEntryData(matter, listen_task)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, DEVICE_PLATFORM)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, SUPPORTED_PLATFORMS)
|
||||
await matter.setup_nodes()
|
||||
|
||||
# If the listen task is already failed, we need to raise ConfigEntryNotReady
|
||||
if listen_task.done() and (listen_error := listen_task.exception()) is not None:
|
||||
await hass.config_entries.async_unload_platforms(entry, DEVICE_PLATFORM)
|
||||
await hass.config_entries.async_unload_platforms(entry, SUPPORTED_PLATFORMS)
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
try:
|
||||
await matter_client.disconnect()
|
||||
@@ -142,7 +142,9 @@ async def _client_listen(
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, DEVICE_PLATFORM)
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
entry, SUPPORTED_PLATFORMS
|
||||
)
|
||||
|
||||
if unload_ok:
|
||||
matter_entry_data: MatterEntryData = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
@@ -3,11 +3,6 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from chip.clusters import Objects as all_clusters
|
||||
from matter_server.client.models.node_device import (
|
||||
AbstractMatterNodeDevice,
|
||||
MatterBridgedNodeDevice,
|
||||
)
|
||||
from matter_server.common.models import EventType, ServerInfoMessage
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -17,12 +12,12 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, ID_TYPE_DEVICE_ID, ID_TYPE_SERIAL, LOGGER
|
||||
from .device_platform import DEVICE_PLATFORM
|
||||
from .discovery import async_discover_entities
|
||||
from .helpers import get_device_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from matter_server.client import MatterClient
|
||||
from matter_server.client.models.node import MatterNode
|
||||
from matter_server.client.models.node import MatterEndpoint, MatterNode
|
||||
|
||||
|
||||
class MatterAdapter:
|
||||
@@ -51,12 +46,8 @@ class MatterAdapter:
|
||||
for node in await self.matter_client.get_nodes():
|
||||
self._setup_node(node)
|
||||
|
||||
def node_added_callback(event: EventType, node: MatterNode | None) -> None:
|
||||
def node_added_callback(event: EventType, node: MatterNode) -> None:
|
||||
"""Handle node added event."""
|
||||
if node is None:
|
||||
# We can clean this up when we've improved the typing in the library.
|
||||
# https://github.com/home-assistant-libs/python-matter-server/pull/153
|
||||
raise RuntimeError("Node added event without node")
|
||||
self._setup_node(node)
|
||||
|
||||
self.config_entry.async_on_unload(
|
||||
@@ -67,48 +58,32 @@ class MatterAdapter:
|
||||
"""Set up an node."""
|
||||
LOGGER.debug("Setting up entities for node %s", node.node_id)
|
||||
|
||||
bridge_unique_id: str | None = None
|
||||
|
||||
if (
|
||||
node.aggregator_device_type_instance is not None
|
||||
and node.root_device_type_instance is not None
|
||||
and node.root_device_type_instance.get_cluster(
|
||||
all_clusters.BasicInformation
|
||||
)
|
||||
):
|
||||
# create virtual (parent) device for bridge node device
|
||||
bridge_device = MatterBridgedNodeDevice(
|
||||
node.aggregator_device_type_instance
|
||||
)
|
||||
self._create_device_registry(bridge_device)
|
||||
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
|
||||
bridge_unique_id = get_device_id(server_info, bridge_device)
|
||||
|
||||
for node_device in node.node_devices:
|
||||
self._setup_node_device(node_device, bridge_unique_id)
|
||||
for endpoint in node.endpoints.values():
|
||||
# Node endpoints are translated into HA devices
|
||||
self._setup_endpoint(endpoint)
|
||||
|
||||
def _create_device_registry(
|
||||
self,
|
||||
node_device: AbstractMatterNodeDevice,
|
||||
bridge_unique_id: str | None = None,
|
||||
endpoint: MatterEndpoint,
|
||||
) -> None:
|
||||
"""Create a device registry entry."""
|
||||
"""Create a device registry entry for a MatterNode."""
|
||||
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
|
||||
|
||||
basic_info = node_device.device_info()
|
||||
device_type_instances = node_device.device_type_instances()
|
||||
basic_info = endpoint.device_info
|
||||
name = basic_info.nodeLabel or basic_info.productLabel or basic_info.productName
|
||||
|
||||
name = basic_info.nodeLabel
|
||||
if not name and isinstance(node_device, MatterBridgedNodeDevice):
|
||||
# fallback name for Bridge
|
||||
name = "Hub device"
|
||||
elif not name and device_type_instances:
|
||||
# use the productName if no node label is present
|
||||
name = basic_info.productName
|
||||
# handle bridged devices
|
||||
bridge_device_id = None
|
||||
if endpoint.is_bridged_device:
|
||||
bridge_device_id = get_device_id(
|
||||
server_info,
|
||||
endpoint.node.endpoints[0],
|
||||
)
|
||||
bridge_device_id = f"{ID_TYPE_DEVICE_ID}_{bridge_device_id}"
|
||||
|
||||
node_device_id = get_device_id(
|
||||
server_info,
|
||||
node_device,
|
||||
endpoint,
|
||||
)
|
||||
identifiers = {(DOMAIN, f"{ID_TYPE_DEVICE_ID}_{node_device_id}")}
|
||||
# if available, we also add the serialnumber as identifier
|
||||
@@ -124,50 +99,21 @@ class MatterAdapter:
|
||||
sw_version=basic_info.softwareVersionString,
|
||||
manufacturer=basic_info.vendorName,
|
||||
model=basic_info.productName,
|
||||
via_device=(DOMAIN, bridge_unique_id) if bridge_unique_id else None,
|
||||
via_device=(DOMAIN, bridge_device_id) if bridge_device_id else None,
|
||||
)
|
||||
|
||||
def _setup_node_device(
|
||||
self, node_device: AbstractMatterNodeDevice, bridge_unique_id: str | None
|
||||
) -> None:
|
||||
"""Set up a node device."""
|
||||
self._create_device_registry(node_device, bridge_unique_id)
|
||||
def _setup_endpoint(self, endpoint: MatterEndpoint) -> None:
|
||||
"""Set up a MatterEndpoint as HA Device."""
|
||||
# pre-create device registry entry
|
||||
self._create_device_registry(endpoint)
|
||||
# run platform discovery from device type instances
|
||||
for instance in node_device.device_type_instances():
|
||||
created = False
|
||||
|
||||
for platform, devices in DEVICE_PLATFORM.items():
|
||||
entity_descriptions = devices.get(instance.device_type)
|
||||
|
||||
if entity_descriptions is None:
|
||||
continue
|
||||
|
||||
if not isinstance(entity_descriptions, list):
|
||||
entity_descriptions = [entity_descriptions]
|
||||
|
||||
entities = []
|
||||
for entity_description in entity_descriptions:
|
||||
LOGGER.debug(
|
||||
"Creating %s entity for %s (%s)",
|
||||
platform,
|
||||
instance.device_type.__name__,
|
||||
hex(instance.device_type.device_type),
|
||||
)
|
||||
entities.append(
|
||||
entity_description.entity_cls(
|
||||
self.matter_client,
|
||||
node_device,
|
||||
instance,
|
||||
entity_description,
|
||||
)
|
||||
)
|
||||
|
||||
self.platform_handlers[platform](entities)
|
||||
created = True
|
||||
|
||||
if not created:
|
||||
LOGGER.warning(
|
||||
"Found unsupported device %s (%s)",
|
||||
type(instance).__name__,
|
||||
hex(instance.device_type.device_type),
|
||||
)
|
||||
for entity_info in async_discover_entities(endpoint):
|
||||
LOGGER.debug(
|
||||
"Creating %s entity for %s",
|
||||
entity_info.platform,
|
||||
entity_info.primary_attribute,
|
||||
)
|
||||
new_entity = entity_info.entity_class(
|
||||
self.matter_client, endpoint, entity_info
|
||||
)
|
||||
self.platform_handlers[entity_info.platform]([new_entity])
|
||||
|
@@ -1,11 +1,9 @@
|
||||
"""Matter binary sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
|
||||
from chip.clusters import Objects as clusters
|
||||
from matter_server.client.models import device_types
|
||||
from chip.clusters.Objects import uint
|
||||
from chip.clusters.Types import Nullable, NullValue
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -17,8 +15,9 @@ from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
|
||||
from .entity import MatterEntity
|
||||
from .helpers import get_matter
|
||||
from .models import MatterDiscoverySchema
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -34,60 +33,70 @@ async def async_setup_entry(
|
||||
class MatterBinarySensor(MatterEntity, BinarySensorEntity):
|
||||
"""Representation of a Matter binary sensor."""
|
||||
|
||||
entity_description: MatterBinarySensorEntityDescription
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
self._attr_is_on = self.get_matter_attribute_value(
|
||||
# We always subscribe to a single value
|
||||
self.entity_description.subscribe_attributes[0],
|
||||
)
|
||||
value: bool | uint | int | Nullable | None
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value in (None, NullValue):
|
||||
value = None
|
||||
elif value_convert := self._entity_info.measurement_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_is_on = value
|
||||
|
||||
|
||||
class MatterOccupancySensor(MatterBinarySensor):
|
||||
"""Representation of a Matter occupancy sensor."""
|
||||
|
||||
_attr_device_class = BinarySensorDeviceClass.OCCUPANCY
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
value = self.get_matter_attribute_value(
|
||||
# We always subscribe to a single value
|
||||
self.entity_description.subscribe_attributes[0],
|
||||
)
|
||||
# Discovery schema(s) to map Matter Attributes to HA entities
|
||||
DISCOVERY_SCHEMAS = [
|
||||
# device specific: translate Hue motion to sensor to HA Motion sensor
|
||||
# instead of generic occupancy sensor
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=BinarySensorEntityDescription(
|
||||
key="HueMotionSensor",
|
||||
device_class=BinarySensorDeviceClass.MOTION,
|
||||
name="Motion",
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
|
||||
vendor_id=(4107,),
|
||||
product_name=("Hue motion sensor",),
|
||||
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=BinarySensorEntityDescription(
|
||||
key="ContactSensor",
|
||||
device_class=BinarySensorDeviceClass.DOOR,
|
||||
name="Contact",
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.BooleanState.Attributes.StateValue,),
|
||||
# value is inverted on matter to what we expect
|
||||
measurement_to_ha=lambda x: not x,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=BinarySensorEntityDescription(
|
||||
key="OccupancySensor",
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
name="Occupancy",
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
|
||||
# The first bit = if occupied
|
||||
self._attr_is_on = (value & 1 == 1) if value is not None else None
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterBinarySensorEntityDescription(
|
||||
BinarySensorEntityDescription,
|
||||
MatterEntityDescriptionBaseClass,
|
||||
):
|
||||
"""Matter Binary Sensor entity description."""
|
||||
|
||||
|
||||
# You can't set default values on inherited data classes
|
||||
MatterSensorEntityDescriptionFactory = partial(
|
||||
MatterBinarySensorEntityDescription, entity_cls=MatterBinarySensor
|
||||
)
|
||||
|
||||
DEVICE_ENTITY: dict[
|
||||
type[device_types.DeviceType],
|
||||
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
|
||||
] = {
|
||||
device_types.ContactSensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.ContactSensor,
|
||||
name="Contact",
|
||||
subscribe_attributes=(clusters.BooleanState.Attributes.StateValue,),
|
||||
device_class=BinarySensorDeviceClass.DOOR,
|
||||
measurement_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
|
||||
),
|
||||
device_types.OccupancySensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.OccupancySensor,
|
||||
name="Occupancy",
|
||||
entity_cls=MatterOccupancySensor,
|
||||
subscribe_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=BinarySensorEntityDescription(
|
||||
key="BatteryChargeLevel",
|
||||
device_class=BinarySensorDeviceClass.BATTERY,
|
||||
name="Battery Status",
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.PowerSource.Attributes.BatChargeLevel,),
|
||||
# only add binary battery sensor if a regular percentage based is not available
|
||||
absent_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,),
|
||||
measurement_to_ha=lambda x: x != clusters.PowerSource.Enums.BatChargeLevel.kOk,
|
||||
),
|
||||
}
|
||||
]
|
||||
|
@@ -1,30 +0,0 @@
|
||||
"""All mappings of Matter devices to Home Assistant platforms."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
from .binary_sensor import DEVICE_ENTITY as BINARY_SENSOR_DEVICE_ENTITY
|
||||
from .light import DEVICE_ENTITY as LIGHT_DEVICE_ENTITY
|
||||
from .sensor import DEVICE_ENTITY as SENSOR_DEVICE_ENTITY
|
||||
from .switch import DEVICE_ENTITY as SWITCH_DEVICE_ENTITY
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from matter_server.client.models.device_types import DeviceType
|
||||
|
||||
from .entity import MatterEntityDescriptionBaseClass
|
||||
|
||||
|
||||
DEVICE_PLATFORM: dict[
|
||||
Platform,
|
||||
dict[
|
||||
type[DeviceType],
|
||||
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
|
||||
],
|
||||
] = {
|
||||
Platform.BINARY_SENSOR: BINARY_SENSOR_DEVICE_ENTITY,
|
||||
Platform.LIGHT: LIGHT_DEVICE_ENTITY,
|
||||
Platform.SENSOR: SENSOR_DEVICE_ENTITY,
|
||||
Platform.SWITCH: SWITCH_DEVICE_ENTITY,
|
||||
}
|
homeassistant/components/matter/discovery.py (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
"""Map Matter Nodes and Attributes to Home Assistant entities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
|
||||
from chip.clusters.Objects import ClusterAttributeDescriptor
|
||||
from matter_server.client.models.node import MatterEndpoint
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .binary_sensor import DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS
|
||||
from .light import DISCOVERY_SCHEMAS as LIGHT_SCHEMAS
|
||||
from .models import MatterDiscoverySchema, MatterEntityInfo
|
||||
from .sensor import DISCOVERY_SCHEMAS as SENSOR_SCHEMAS
|
||||
from .switch import DISCOVERY_SCHEMAS as SWITCH_SCHEMAS
|
||||
|
||||
DISCOVERY_SCHEMAS: dict[Platform, list[MatterDiscoverySchema]] = {
|
||||
Platform.BINARY_SENSOR: BINARY_SENSOR_SCHEMAS,
|
||||
Platform.LIGHT: LIGHT_SCHEMAS,
|
||||
Platform.SENSOR: SENSOR_SCHEMAS,
|
||||
Platform.SWITCH: SWITCH_SCHEMAS,
|
||||
}
|
||||
SUPPORTED_PLATFORMS = tuple(DISCOVERY_SCHEMAS.keys())
|
||||
|
||||
|
||||
@callback
|
||||
def iter_schemas() -> Generator[MatterDiscoverySchema, None, None]:
|
||||
"""Iterate over all available discovery schemas."""
|
||||
for platform_schemas in DISCOVERY_SCHEMAS.values():
|
||||
yield from platform_schemas
|
||||
|
||||
|
||||
@callback
|
||||
def async_discover_entities(
|
||||
endpoint: MatterEndpoint,
|
||||
) -> Generator[MatterEntityInfo, None, None]:
|
||||
"""Run discovery on MatterEndpoint and return matching MatterEntityInfo(s)."""
|
||||
discovered_attributes: set[type[ClusterAttributeDescriptor]] = set()
|
||||
device_info = endpoint.device_info
|
||||
for schema in iter_schemas():
|
||||
# abort if attribute(s) already discovered
|
||||
if any(x in schema.required_attributes for x in discovered_attributes):
|
||||
continue
|
||||
|
||||
# check vendor_id
|
||||
if (
|
||||
schema.vendor_id is not None
|
||||
and device_info.vendorID not in schema.vendor_id
|
||||
):
|
||||
continue
|
||||
|
||||
# check product_name
|
||||
if (
|
||||
schema.product_name is not None
|
||||
and device_info.productName not in schema.product_name
|
||||
):
|
||||
continue
|
||||
|
||||
# check required device_type
|
||||
if schema.device_type is not None and not any(
|
||||
x in schema.device_type for x in endpoint.device_types
|
||||
):
|
||||
continue
|
||||
|
||||
# check absent device_type
|
||||
if schema.not_device_type is not None and any(
|
||||
x in schema.not_device_type for x in endpoint.device_types
|
||||
):
|
||||
continue
|
||||
|
||||
# check endpoint_id
|
||||
if (
|
||||
schema.endpoint_id is not None
|
||||
and endpoint.endpoint_id not in schema.endpoint_id
|
||||
):
|
||||
continue
|
||||
|
||||
# check required attributes
|
||||
if schema.required_attributes is not None and not all(
|
||||
endpoint.has_attribute(None, val_schema)
|
||||
for val_schema in schema.required_attributes
|
||||
):
|
||||
continue
|
||||
|
||||
# check for values that may not be present
|
||||
if schema.absent_attributes is not None and any(
|
||||
endpoint.has_attribute(None, val_schema)
|
||||
for val_schema in schema.absent_attributes
|
||||
):
|
||||
continue
|
||||
|
||||
# all checks passed, this value belongs to an entity
|
||||
|
||||
attributes_to_watch = list(schema.required_attributes)
|
||||
if schema.optional_attributes:
|
||||
# check optional attributes
|
||||
for optional_attribute in schema.optional_attributes:
|
||||
if optional_attribute in attributes_to_watch:
|
||||
continue
|
||||
if endpoint.has_attribute(None, optional_attribute):
|
||||
attributes_to_watch.append(optional_attribute)
|
||||
|
||||
yield MatterEntityInfo(
|
||||
endpoint=endpoint,
|
||||
platform=schema.platform,
|
||||
attributes_to_watch=attributes_to_watch,
|
||||
entity_description=schema.entity_description,
|
||||
entity_class=schema.entity_class,
|
||||
measurement_to_ha=schema.measurement_to_ha,
|
||||
)
|
||||
|
||||
# prevent re-discovery of the same attributes
|
||||
if not schema.allow_multi:
|
||||
discovered_attributes.update(attributes_to_watch)
|
@@ -3,90 +3,77 @@ from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from chip.clusters.Objects import ClusterAttributeDescriptor
|
||||
from matter_server.client.models.device_type_instance import MatterDeviceTypeInstance
|
||||
from matter_server.client.models.node_device import AbstractMatterNodeDevice
|
||||
from matter_server.common.helpers.util import create_attribute_path
|
||||
from matter_server.common.models import EventType, ServerInfoMessage
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.entity import DeviceInfo, Entity, EntityDescription
|
||||
from homeassistant.helpers.entity import DeviceInfo, Entity
|
||||
|
||||
from .const import DOMAIN, ID_TYPE_DEVICE_ID
|
||||
from .helpers import get_device_id, get_operational_instance_id
|
||||
from .helpers import get_device_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from matter_server.client import MatterClient
|
||||
from matter_server.client.models.node import MatterEndpoint
|
||||
|
||||
from .discovery import MatterEntityInfo
|
||||
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterEntityDescription:
|
||||
"""Mixin to map a matter device to a Home Assistant entity."""
|
||||
|
||||
entity_cls: type[MatterEntity]
|
||||
subscribe_attributes: tuple
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterEntityDescriptionBaseClass(EntityDescription, MatterEntityDescription):
|
||||
"""For typing a base class that inherits from both entity descriptions."""
|
||||
|
||||
|
||||
class MatterEntity(Entity):
|
||||
"""Entity class for Matter devices."""
|
||||
|
||||
entity_description: MatterEntityDescriptionBaseClass
|
||||
_attr_should_poll = False
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
matter_client: MatterClient,
|
||||
node_device: AbstractMatterNodeDevice,
|
||||
device_type_instance: MatterDeviceTypeInstance,
|
||||
entity_description: MatterEntityDescriptionBaseClass,
|
||||
endpoint: MatterEndpoint,
|
||||
entity_info: MatterEntityInfo,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
self.matter_client = matter_client
|
||||
self._node_device = node_device
|
||||
self._device_type_instance = device_type_instance
|
||||
self.entity_description = entity_description
|
||||
self._endpoint = endpoint
|
||||
self._entity_info = entity_info
|
||||
self.entity_description = entity_info.entity_description
|
||||
self._unsubscribes: list[Callable] = []
|
||||
# for fast lookups we create a mapping to the attribute paths
|
||||
self._attributes_map: dict[type, str] = {}
|
||||
# The server info is set when the client connects to the server.
|
||||
server_info = cast(ServerInfoMessage, self.matter_client.server_info)
|
||||
# create unique_id based on "Operational Instance Name" and endpoint/device type
|
||||
node_device_id = get_device_id(server_info, endpoint)
|
||||
self._attr_unique_id = (
|
||||
f"{get_operational_instance_id(server_info, self._node_device.node())}-"
|
||||
f"{device_type_instance.endpoint.endpoint_id}-"
|
||||
f"{device_type_instance.device_type.device_type}"
|
||||
f"{node_device_id}-"
|
||||
f"{endpoint.endpoint_id}-"
|
||||
f"{entity_info.entity_description.key}-"
|
||||
f"{entity_info.primary_attribute.cluster_id}-"
|
||||
f"{entity_info.primary_attribute.attribute_id}"
|
||||
)
|
||||
node_device_id = get_device_id(server_info, node_device)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{ID_TYPE_DEVICE_ID}_{node_device_id}")}
|
||||
)
|
||||
self._attr_available = self._node_device.node().available
|
||||
self._attr_available = self._endpoint.node.available
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle being added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
# Subscribe to attribute updates.
|
||||
for attr_cls in self.entity_description.subscribe_attributes:
|
||||
for attr_cls in self._entity_info.attributes_to_watch:
|
||||
attr_path = self.get_matter_attribute_path(attr_cls)
|
||||
self._attributes_map[attr_cls] = attr_path
|
||||
self._unsubscribes.append(
|
||||
self.matter_client.subscribe(
|
||||
callback=self._on_matter_event,
|
||||
event_filter=EventType.ATTRIBUTE_UPDATED,
|
||||
node_filter=self._device_type_instance.node.node_id,
|
||||
node_filter=self._endpoint.node.node_id,
|
||||
attr_path_filter=attr_path,
|
||||
)
|
||||
)
|
||||
@@ -95,7 +82,7 @@ class MatterEntity(Entity):
|
||||
self.matter_client.subscribe(
|
||||
callback=self._on_matter_event,
|
||||
event_filter=EventType.NODE_UPDATED,
|
||||
node_filter=self._device_type_instance.node.node_id,
|
||||
node_filter=self._endpoint.node.node_id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -110,7 +97,7 @@ class MatterEntity(Entity):
|
||||
@callback
|
||||
def _on_matter_event(self, event: EventType, data: Any = None) -> None:
|
||||
"""Call on update."""
|
||||
self._attr_available = self._device_type_instance.node.available
|
||||
self._attr_available = self._endpoint.node.available
|
||||
self._update_from_device()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -124,14 +111,13 @@ class MatterEntity(Entity):
|
||||
self, attribute: type[ClusterAttributeDescriptor]
|
||||
) -> Any:
|
||||
"""Get current value for given attribute."""
|
||||
return self._device_type_instance.get_attribute_value(None, attribute)
|
||||
return self._endpoint.get_attribute_value(None, attribute)
|
||||
|
||||
@callback
|
||||
def get_matter_attribute_path(
|
||||
self, attribute: type[ClusterAttributeDescriptor]
|
||||
) -> str:
|
||||
"""Return AttributePath by providing the endpoint and Attribute class."""
|
||||
endpoint = self._device_type_instance.endpoint.endpoint_id
|
||||
return create_attribute_path(
|
||||
endpoint, attribute.cluster_id, attribute.attribute_id
|
||||
self._endpoint.endpoint_id, attribute.cluster_id, attribute.attribute_id
|
||||
)
|
||||
|
@@ -11,8 +11,7 @@ from homeassistant.helpers import device_registry as dr
|
||||
from .const import DOMAIN, ID_TYPE_DEVICE_ID
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from matter_server.client.models.node import MatterNode
|
||||
from matter_server.client.models.node_device import AbstractMatterNodeDevice
|
||||
from matter_server.client.models.node import MatterEndpoint, MatterNode
|
||||
from matter_server.common.models import ServerInfoMessage
|
||||
|
||||
from .adapter import MatterAdapter
|
||||
@@ -50,15 +49,21 @@ def get_operational_instance_id(
|
||||
|
||||
def get_device_id(
|
||||
server_info: ServerInfoMessage,
|
||||
node_device: AbstractMatterNodeDevice,
|
||||
endpoint: MatterEndpoint,
|
||||
) -> str:
|
||||
"""Return HA device_id for the given MatterNodeDevice."""
|
||||
operational_instance_id = get_operational_instance_id(
|
||||
server_info, node_device.node()
|
||||
)
|
||||
# Append nodedevice(type) to differentiate between a root node
|
||||
# and bridge within Home Assistant devices.
|
||||
return f"{operational_instance_id}-{node_device.__class__.__name__}"
|
||||
"""Return HA device_id for the given MatterEndpoint."""
|
||||
operational_instance_id = get_operational_instance_id(server_info, endpoint.node)
|
||||
# Append endpoint ID if this endpoint is a bridged or composed device
|
||||
if endpoint.is_composed_device:
|
||||
compose_parent = endpoint.node.get_compose_parent(endpoint.endpoint_id)
|
||||
assert compose_parent is not None
|
||||
postfix = str(compose_parent.endpoint_id)
|
||||
elif endpoint.is_bridged_device:
|
||||
postfix = str(endpoint.endpoint_id)
|
||||
else:
|
||||
# this should be compatible with previous versions
|
||||
postfix = "MatterNodeDevice"
|
||||
return f"{operational_instance_id}-{postfix}"
|
||||
|
||||
|
||||
async def get_node_from_device_entry(
|
||||
@@ -91,8 +96,8 @@ async def get_node_from_device_entry(
|
||||
(
|
||||
node
|
||||
for node in await matter_client.get_nodes()
|
||||
for node_device in node.node_devices
|
||||
if get_device_id(server_info, node_device) == device_id
|
||||
for endpoint in node.endpoints.values()
|
||||
if get_device_id(server_info, endpoint) == device_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
@@ -1,9 +1,7 @@
|
||||
"""Matter light."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from functools import partial
|
||||
from enum import IntFlag
|
||||
from typing import Any
|
||||
|
||||
from chip.clusters import Objects as clusters
|
||||
@@ -24,8 +22,9 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import LOGGER
|
||||
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
|
||||
from .entity import MatterEntity
|
||||
from .helpers import get_matter
|
||||
from .models import MatterDiscoverySchema
|
||||
from .util import (
|
||||
convert_to_hass_hs,
|
||||
convert_to_hass_xy,
|
||||
@@ -34,32 +33,13 @@ from .util import (
|
||||
renormalize,
|
||||
)
|
||||
|
||||
|
||||
class MatterColorMode(Enum):
|
||||
"""Matter color mode."""
|
||||
|
||||
HS = 0
|
||||
XY = 1
|
||||
COLOR_TEMP = 2
|
||||
|
||||
|
||||
COLOR_MODE_MAP = {
|
||||
MatterColorMode.HS: ColorMode.HS,
|
||||
MatterColorMode.XY: ColorMode.XY,
|
||||
MatterColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP,
|
||||
clusters.ColorControl.Enums.ColorMode.kCurrentHueAndCurrentSaturation: ColorMode.HS,
|
||||
clusters.ColorControl.Enums.ColorMode.kCurrentXAndCurrentY: ColorMode.XY,
|
||||
clusters.ColorControl.Enums.ColorMode.kColorTemperature: ColorMode.COLOR_TEMP,
|
||||
}
|
||||
|
||||
|
||||
class MatterColorControlFeatures(Enum):
|
||||
"""Matter color control features."""
|
||||
|
||||
HS = 0 # Hue and saturation (Optional if device is color capable)
|
||||
EHUE = 1 # Enhanced hue and saturation (Optional if device is color capable)
|
||||
COLOR_LOOP = 2 # Color loop (Optional if device is color capable)
|
||||
XY = 3 # XY (Mandatory if device is color capable)
|
||||
COLOR_TEMP = 4 # Color temperature (Mandatory if device is color capable)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -73,63 +53,37 @@ async def async_setup_entry(
|
||||
class MatterLight(MatterEntity, LightEntity):
|
||||
"""Representation of a Matter light."""
|
||||
|
||||
entity_description: MatterLightEntityDescription
|
||||
|
||||
def _supports_feature(
|
||||
self, feature_map: int, feature: MatterColorControlFeatures
|
||||
) -> bool:
|
||||
"""Return if device supports given feature."""
|
||||
|
||||
return (feature_map & (1 << feature.value)) != 0
|
||||
|
||||
def _supports_color_mode(self, color_feature: MatterColorControlFeatures) -> bool:
|
||||
"""Return if device supports given color mode."""
|
||||
|
||||
feature_map = self.get_matter_attribute_value(
|
||||
clusters.ColorControl.Attributes.FeatureMap,
|
||||
)
|
||||
|
||||
assert isinstance(feature_map, int)
|
||||
|
||||
return self._supports_feature(feature_map, color_feature)
|
||||
|
||||
def _supports_hs_color(self) -> bool:
|
||||
"""Return if device supports hs color."""
|
||||
|
||||
return self._supports_color_mode(MatterColorControlFeatures.HS)
|
||||
|
||||
def _supports_xy_color(self) -> bool:
|
||||
"""Return if device supports xy color."""
|
||||
|
||||
return self._supports_color_mode(MatterColorControlFeatures.XY)
|
||||
|
||||
def _supports_color_temperature(self) -> bool:
|
||||
"""Return if device supports color temperature."""
|
||||
|
||||
return self._supports_color_mode(MatterColorControlFeatures.COLOR_TEMP)
|
||||
|
||||
def _supports_brightness(self) -> bool:
|
||||
"""Return if device supports brightness."""
|
||||
entity_description: LightEntityDescription
|
||||
|
||||
@property
|
||||
def supports_color(self) -> bool:
|
||||
"""Return if the device supports color control."""
|
||||
if not self._attr_supported_color_modes:
|
||||
return False
|
||||
return (
|
||||
clusters.LevelControl.Attributes.CurrentLevel
|
||||
in self.entity_description.subscribe_attributes
|
||||
ColorMode.HS in self._attr_supported_color_modes
|
||||
or ColorMode.XY in self._attr_supported_color_modes
|
||||
)
|
||||
|
||||
def _supports_color(self) -> bool:
|
||||
"""Return if device supports color."""
|
||||
@property
|
||||
def supports_color_temperature(self) -> bool:
|
||||
"""Return if the device supports color temperature control."""
|
||||
if not self._attr_supported_color_modes:
|
||||
return False
|
||||
return ColorMode.COLOR_TEMP in self._attr_supported_color_modes
|
||||
|
||||
return (
|
||||
clusters.ColorControl.Attributes.ColorMode
|
||||
in self.entity_description.subscribe_attributes
|
||||
)
|
||||
@property
|
||||
def supports_brightness(self) -> bool:
|
||||
"""Return if the device supports bridghtness control."""
|
||||
if not self._attr_supported_color_modes:
|
||||
return False
|
||||
return ColorMode.BRIGHTNESS in self._attr_supported_color_modes
|
||||
|
||||
async def _set_xy_color(self, xy_color: tuple[float, float]) -> None:
|
||||
"""Set xy color."""
|
||||
|
||||
matter_xy = convert_to_matter_xy(xy_color)
|
||||
|
||||
LOGGER.debug("Setting xy color to %s", matter_xy)
|
||||
await self.send_device_command(
|
||||
clusters.ColorControl.Commands.MoveToColor(
|
||||
colorX=int(matter_xy[0]),
|
||||
@@ -144,7 +98,6 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
|
||||
matter_hs = convert_to_matter_hs(hs_color)
|
||||
|
||||
LOGGER.debug("Setting hs color to %s", matter_hs)
|
||||
await self.send_device_command(
|
||||
clusters.ColorControl.Commands.MoveToHueAndSaturation(
|
||||
hue=int(matter_hs[0]),
|
||||
@@ -157,7 +110,6 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
async def _set_color_temp(self, color_temp: int) -> None:
|
||||
"""Set color temperature."""
|
||||
|
||||
LOGGER.debug("Setting color temperature to %s", color_temp)
|
||||
await self.send_device_command(
|
||||
clusters.ColorControl.Commands.MoveToColorTemperature(
|
||||
colorTemperature=color_temp,
|
||||
@@ -169,8 +121,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
async def _set_brightness(self, brightness: int) -> None:
|
||||
"""Set brightness."""
|
||||
|
||||
LOGGER.debug("Setting brightness to %s", brightness)
|
||||
level_control = self._device_type_instance.get_cluster(clusters.LevelControl)
|
||||
level_control = self._endpoint.get_cluster(clusters.LevelControl)
|
||||
|
||||
assert level_control is not None
|
||||
|
||||
@@ -207,7 +158,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
LOGGER.debug(
|
||||
"Got xy color %s for %s",
|
||||
xy_color,
|
||||
self._device_type_instance,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
return xy_color
|
||||
@@ -231,7 +182,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
LOGGER.debug(
|
||||
"Got hs color %s for %s",
|
||||
hs_color,
|
||||
self._device_type_instance,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
return hs_color
|
||||
@@ -248,7 +199,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
LOGGER.debug(
|
||||
"Got color temperature %s for %s",
|
||||
color_temp,
|
||||
self._device_type_instance,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
return int(color_temp)
|
||||
@@ -256,7 +207,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
def _get_brightness(self) -> int:
|
||||
"""Get brightness from matter."""
|
||||
|
||||
level_control = self._device_type_instance.get_cluster(clusters.LevelControl)
|
||||
level_control = self._endpoint.get_cluster(clusters.LevelControl)
|
||||
|
||||
# We should not get here if brightness is not supported.
|
||||
assert level_control is not None
|
||||
@@ -264,7 +215,7 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
LOGGER.debug( # type: ignore[unreachable]
|
||||
"Got brightness %s for %s",
|
||||
level_control.currentLevel,
|
||||
self._device_type_instance,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
return round(
|
||||
@@ -284,10 +235,12 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
|
||||
assert color_mode is not None
|
||||
|
||||
ha_color_mode = COLOR_MODE_MAP[MatterColorMode(color_mode)]
|
||||
ha_color_mode = COLOR_MODE_MAP[color_mode]
|
||||
|
||||
LOGGER.debug(
|
||||
"Got color mode (%s) for %s", ha_color_mode, self._device_type_instance
|
||||
"Got color mode (%s) for %s",
|
||||
ha_color_mode,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
return ha_color_mode
|
||||
@@ -295,8 +248,8 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
async def send_device_command(self, command: Any) -> None:
|
||||
"""Send device command."""
|
||||
await self.matter_client.send_device_command(
|
||||
node_id=self._device_type_instance.node.node_id,
|
||||
endpoint_id=self._device_type_instance.endpoint_id,
|
||||
node_id=self._endpoint.node.node_id,
|
||||
endpoint_id=self._endpoint.endpoint_id,
|
||||
command=command,
|
||||
)
|
||||
|
||||
@@ -308,15 +261,18 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
color_temp = kwargs.get(ATTR_COLOR_TEMP)
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS)
|
||||
|
||||
if self._supports_color():
|
||||
if hs_color is not None and self._supports_hs_color():
|
||||
if self.supported_color_modes is not None:
|
||||
if hs_color is not None and ColorMode.HS in self.supported_color_modes:
|
||||
await self._set_hs_color(hs_color)
|
||||
elif xy_color is not None and self._supports_xy_color():
|
||||
elif xy_color is not None and ColorMode.XY in self.supported_color_modes:
|
||||
await self._set_xy_color(xy_color)
|
||||
elif color_temp is not None and self._supports_color_temperature():
|
||||
elif (
|
||||
color_temp is not None
|
||||
and ColorMode.COLOR_TEMP in self.supported_color_modes
|
||||
):
|
||||
await self._set_color_temp(color_temp)
|
||||
|
||||
if brightness is not None and self._supports_brightness():
|
||||
if brightness is not None and self.supports_brightness:
|
||||
await self._set_brightness(brightness)
|
||||
return
|
||||
|
||||
@@ -333,107 +289,81 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
|
||||
supports_color = self._supports_color()
|
||||
supports_color_temperature = (
|
||||
self._supports_color_temperature() if supports_color else False
|
||||
)
|
||||
supports_brightness = self._supports_brightness()
|
||||
|
||||
if self._attr_supported_color_modes is None:
|
||||
supported_color_modes = set()
|
||||
if supports_color:
|
||||
supported_color_modes.add(ColorMode.XY)
|
||||
if self._supports_hs_color():
|
||||
# work out what (color)features are supported
|
||||
supported_color_modes: set[ColorMode] = set()
|
||||
# brightness support
|
||||
if self._entity_info.endpoint.has_attribute(
|
||||
None, clusters.LevelControl.Attributes.CurrentLevel
|
||||
):
|
||||
supported_color_modes.add(ColorMode.BRIGHTNESS)
|
||||
# colormode(s)
|
||||
if self._entity_info.endpoint.has_attribute(
|
||||
None, clusters.ColorControl.Attributes.ColorMode
|
||||
):
|
||||
capabilities = self.get_matter_attribute_value(
|
||||
clusters.ColorControl.Attributes.ColorCapabilities
|
||||
)
|
||||
|
||||
assert capabilities is not None
|
||||
|
||||
if capabilities & ColorCapabilities.kHueSaturationSupported:
|
||||
supported_color_modes.add(ColorMode.HS)
|
||||
|
||||
if supports_color_temperature:
|
||||
supported_color_modes.add(ColorMode.COLOR_TEMP)
|
||||
if capabilities & ColorCapabilities.kXYAttributesSupported:
|
||||
supported_color_modes.add(ColorMode.XY)
|
||||
|
||||
if supports_brightness:
|
||||
supported_color_modes.add(ColorMode.BRIGHTNESS)
|
||||
if capabilities & ColorCapabilities.kColorTemperatureSupported:
|
||||
supported_color_modes.add(ColorMode.COLOR_TEMP)
|
||||
|
||||
self._attr_supported_color_modes = (
|
||||
supported_color_modes if supported_color_modes else None
|
||||
self._attr_supported_color_modes = supported_color_modes
|
||||
|
||||
LOGGER.debug(
|
||||
"Supported color modes: %s for %s",
|
||||
self._attr_supported_color_modes,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
LOGGER.debug(
|
||||
"Supported color modes: %s for %s",
|
||||
self._attr_supported_color_modes,
|
||||
self._device_type_instance,
|
||||
)
|
||||
# set current values
|
||||
|
||||
if supports_color:
|
||||
if self.supports_color:
|
||||
self._attr_color_mode = self._get_color_mode()
|
||||
if self._attr_color_mode == ColorMode.HS:
|
||||
self._attr_hs_color = self._get_hs_color()
|
||||
else:
|
||||
self._attr_xy_color = self._get_xy_color()
|
||||
|
||||
if supports_color_temperature:
|
||||
if self.supports_color_temperature:
|
||||
self._attr_color_temp = self._get_color_temperature()
|
||||
|
||||
self._attr_is_on = self.get_matter_attribute_value(
|
||||
clusters.OnOff.Attributes.OnOff
|
||||
)
|
||||
|
||||
if supports_brightness:
|
||||
if self.supports_brightness:
|
||||
self._attr_brightness = self._get_brightness()
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterLightEntityDescription(
|
||||
LightEntityDescription,
|
||||
MatterEntityDescriptionBaseClass,
|
||||
):
|
||||
"""Matter light entity description."""
|
||||
# This enum should be removed once the ColorControlCapabilities enum is added to the CHIP (Matter) library
|
||||
# clusters.ColorControl.Bitmap.ColorCapabilities
|
||||
class ColorCapabilities(IntFlag):
|
||||
"""Color control capabilities bitmap."""
|
||||
|
||||
kHueSaturationSupported = 0x1
|
||||
kEnhancedHueSupported = 0x2
|
||||
kColorLoopSupported = 0x4
|
||||
kXYAttributesSupported = 0x8
|
||||
kColorTemperatureSupported = 0x10
|
||||
|
||||
|
||||
# You can't set default values on inherited data classes
|
||||
MatterLightEntityDescriptionFactory = partial(
|
||||
MatterLightEntityDescription, entity_cls=MatterLight
|
||||
)
|
||||
|
||||
# Mapping of a Matter Device type to Light Entity Description.
|
||||
# A Matter device type (instance) can consist of multiple attributes.
|
||||
# For example a Color Light which has an attribute to control brightness
|
||||
# but also for color.
|
||||
|
||||
DEVICE_ENTITY: dict[
|
||||
type[device_types.DeviceType],
|
||||
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
|
||||
] = {
|
||||
device_types.OnOffLight: MatterLightEntityDescriptionFactory(
|
||||
key=device_types.OnOffLight,
|
||||
subscribe_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
),
|
||||
device_types.DimmableLight: MatterLightEntityDescriptionFactory(
|
||||
key=device_types.DimmableLight,
|
||||
subscribe_attributes=(
|
||||
clusters.OnOff.Attributes.OnOff,
|
||||
clusters.LevelControl.Attributes.CurrentLevel,
|
||||
),
|
||||
),
|
||||
device_types.DimmablePlugInUnit: MatterLightEntityDescriptionFactory(
|
||||
key=device_types.DimmablePlugInUnit,
|
||||
subscribe_attributes=(
|
||||
clusters.OnOff.Attributes.OnOff,
|
||||
clusters.LevelControl.Attributes.CurrentLevel,
|
||||
),
|
||||
),
|
||||
device_types.ColorTemperatureLight: MatterLightEntityDescriptionFactory(
|
||||
key=device_types.ColorTemperatureLight,
|
||||
subscribe_attributes=(
|
||||
clusters.OnOff.Attributes.OnOff,
|
||||
clusters.LevelControl.Attributes.CurrentLevel,
|
||||
clusters.ColorControl.Attributes.ColorMode,
|
||||
clusters.ColorControl.Attributes.ColorTemperatureMireds,
|
||||
),
|
||||
),
|
||||
device_types.ExtendedColorLight: MatterLightEntityDescriptionFactory(
|
||||
key=device_types.ExtendedColorLight,
|
||||
subscribe_attributes=(
|
||||
clusters.OnOff.Attributes.OnOff,
|
||||
# Discovery schema(s) to map Matter Attributes to HA entities
|
||||
DISCOVERY_SCHEMAS = [
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.LIGHT,
|
||||
entity_description=LightEntityDescription(key="MatterLight"),
|
||||
entity_class=MatterLight,
|
||||
required_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
optional_attributes=(
|
||||
clusters.LevelControl.Attributes.CurrentLevel,
|
||||
clusters.ColorControl.Attributes.ColorMode,
|
||||
clusters.ColorControl.Attributes.CurrentHue,
|
||||
@@ -442,5 +372,7 @@ DEVICE_ENTITY: dict[
|
||||
clusters.ColorControl.Attributes.CurrentY,
|
||||
clusters.ColorControl.Attributes.ColorTemperatureMireds,
|
||||
),
|
||||
# restrict device type to prevent discovery in switch platform
|
||||
not_device_type=(device_types.OnOffPlugInUnit,),
|
||||
),
|
||||
}
|
||||
]
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"dependencies": ["websocket_api"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/matter",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["python-matter-server==3.0.0"]
|
||||
"requirements": ["python-matter-server==3.1.0"]
|
||||
}
|
||||
|
homeassistant/components/matter/models.py (new file)
@@ -0,0 +1,109 @@
"""Models used for the Matter integration."""

from collections.abc import Callable
from dataclasses import asdict, dataclass
from typing import Any

from chip.clusters import Objects as clusters
from chip.clusters.Objects import ClusterAttributeDescriptor
from matter_server.client.models.device_types import DeviceType
from matter_server.client.models.node import MatterEndpoint

from homeassistant.const import Platform
from homeassistant.helpers.entity import EntityDescription


class DataclassMustHaveAtLeastOne:
    """A dataclass that must have at least one input parameter that is not None."""

    def __post_init__(self) -> None:
        """Post dataclass initialization."""
        if all(val is None for val in asdict(self).values()):
            raise ValueError("At least one input parameter must not be None")


SensorValueTypes = type[
    clusters.uint | int | clusters.Nullable | clusters.float32 | float
]


@dataclass
class MatterEntityInfo:
    """Info discovered from (primary) Matter Attribute to create entity."""

    # MatterEndpoint to which the value(s) belongs
    endpoint: MatterEndpoint

    # the home assistant platform for which an entity should be created
    platform: Platform

    # All attributes that need to be watched by entity (incl. primary)
    attributes_to_watch: list[type[ClusterAttributeDescriptor]]

    # the entity description to use
    entity_description: EntityDescription

    # entity class to use to instantiate the entity
    entity_class: type

    # [optional] function to call to convert the value from the primary attribute
    measurement_to_ha: Callable[[SensorValueTypes], SensorValueTypes] | None = None

    @property
    def primary_attribute(self) -> type[ClusterAttributeDescriptor]:
        """Return Primary Attribute belonging to the entity."""
        return self.attributes_to_watch[0]


@dataclass
class MatterDiscoverySchema:
    """Matter discovery schema.

    The Matter endpoint and its (primary) Attribute for an entity must match these conditions.
    """

    # specify the hass platform for which this scheme applies (e.g. light, sensor)
    platform: Platform

    # platform-specific entity description
    entity_description: EntityDescription

    # entity class to use to instantiate the entity
    entity_class: type

    # DISCOVERY OPTIONS

    # [required] attributes that ALL need to be present
    # on the node for this scheme to pass (minimal one == primary)
    required_attributes: tuple[type[ClusterAttributeDescriptor], ...]

    # [optional] the value's endpoint must contain this devicetype(s)
    device_type: tuple[type[DeviceType] | DeviceType, ...] | None = None

    # [optional] the value's endpoint must NOT contain this devicetype(s)
    not_device_type: tuple[type[DeviceType] | DeviceType, ...] | None = None

    # [optional] the endpoint's vendor_id must match ANY of these values
    vendor_id: tuple[int, ...] | None = None

    # [optional] the endpoint's product_name must match ANY of these values
    product_name: tuple[str, ...] | None = None

    # [optional] the attribute's endpoint_id must match ANY of these values
    endpoint_id: tuple[int, ...] | None = None

    # [optional] additional attributes that MAY NOT be present
    # on the node for this scheme to pass
    absent_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None

    # [optional] additional attributes that may be present
    # these attributes are copied over to attributes_to_watch and
    # are not discovered by other entities
    optional_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None

    # [optional] bool to specify if this primary value may be discovered
    # by multiple platforms
    allow_multi: bool = False

    # [optional] function to call to convert the value from the primary attribute
    measurement_to_ha: Callable[[Any], Any] | None = None
|
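As a quick aside, a minimal sketch of how the DataclassMustHaveAtLeastOne guard in the new models module behaves; the ExampleFilter dataclass is hypothetical and exists only for this illustration.

```python
from dataclasses import dataclass

from homeassistant.components.matter.models import DataclassMustHaveAtLeastOne


@dataclass
class ExampleFilter(DataclassMustHaveAtLeastOne):
    """Hypothetical subclass used only to demonstrate the guard."""

    vendor_id: int | None = None
    product_name: str | None = None


ExampleFilter(vendor_id=0x1234)  # accepted: at least one field is set

try:
    ExampleFilter()  # rejected: every field is None
except ValueError as err:
    print(err)  # -> At least one input parameter must not be None
```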
@@ -1,13 +1,8 @@
|
||||
"""Matter sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
|
||||
from chip.clusters import Objects as clusters
|
||||
from chip.clusters.Types import Nullable, NullValue
|
||||
from matter_server.client.models import device_types
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -27,8 +22,9 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
|
||||
from .entity import MatterEntity
|
||||
from .helpers import get_matter
|
||||
from .models import MatterDiscoverySchema
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -45,94 +41,94 @@ class MatterSensor(MatterEntity, SensorEntity):
|
||||
"""Representation of a Matter sensor."""
|
||||
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
entity_description: MatterSensorEntityDescription
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
measurement: Nullable | float | None
|
||||
measurement = self.get_matter_attribute_value(
|
||||
# We always subscribe to a single value
|
||||
self.entity_description.subscribe_attributes[0],
|
||||
)
|
||||
|
||||
if measurement == NullValue or measurement is None:
|
||||
measurement = None
|
||||
else:
|
||||
measurement = self.entity_description.measurement_to_ha(measurement)
|
||||
|
||||
self._attr_native_value = measurement
|
||||
value: Nullable | float | None
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value in (None, NullValue):
|
||||
value = None
|
||||
elif value_convert := self._entity_info.measurement_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_native_value = value
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterSensorEntityDescriptionMixin:
|
||||
"""Required fields for sensor device mapping."""
|
||||
|
||||
measurement_to_ha: Callable[[float], float]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterSensorEntityDescription(
|
||||
SensorEntityDescription,
|
||||
MatterEntityDescriptionBaseClass,
|
||||
MatterSensorEntityDescriptionMixin,
|
||||
):
|
||||
"""Matter Sensor entity description."""
|
||||
|
||||
|
||||
# You can't set default values on inherited data classes
|
||||
MatterSensorEntityDescriptionFactory = partial(
|
||||
MatterSensorEntityDescription, entity_cls=MatterSensor
|
||||
)
|
||||
|
||||
|
||||
DEVICE_ENTITY: dict[
|
||||
type[device_types.DeviceType],
|
||||
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
|
||||
] = {
|
||||
device_types.TemperatureSensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.TemperatureSensor,
|
||||
name="Temperature",
|
||||
measurement_to_ha=lambda x: x / 100,
|
||||
subscribe_attributes=(
|
||||
clusters.TemperatureMeasurement.Attributes.MeasuredValue,
|
||||
# Discovery schema(s) to map Matter Attributes to HA entities
|
||||
DISCOVERY_SCHEMAS = [
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="TemperatureSensor",
|
||||
name="Temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
),
|
||||
device_types.PressureSensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.PressureSensor,
|
||||
name="Pressure",
|
||||
measurement_to_ha=lambda x: x / 10,
|
||||
subscribe_attributes=(clusters.PressureMeasurement.Attributes.MeasuredValue,),
|
||||
native_unit_of_measurement=UnitOfPressure.KPA,
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
),
|
||||
device_types.FlowSensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.FlowSensor,
|
||||
name="Flow",
|
||||
measurement_to_ha=lambda x: x / 10,
|
||||
subscribe_attributes=(clusters.FlowMeasurement.Attributes.MeasuredValue,),
|
||||
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
|
||||
),
|
||||
device_types.HumiditySensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.HumiditySensor,
|
||||
name="Humidity",
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.TemperatureMeasurement.Attributes.MeasuredValue,),
|
||||
measurement_to_ha=lambda x: x / 100,
|
||||
subscribe_attributes=(
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="PressureSensor",
|
||||
name="Pressure",
|
||||
native_unit_of_measurement=UnitOfPressure.KPA,
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.PressureMeasurement.Attributes.MeasuredValue,),
|
||||
measurement_to_ha=lambda x: x / 10,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="FlowSensor",
|
||||
name="Flow",
|
||||
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
|
||||
device_class=SensorDeviceClass.WATER, # what is the device class here ?
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.FlowMeasurement.Attributes.MeasuredValue,),
|
||||
measurement_to_ha=lambda x: x / 10,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="HumiditySensor",
|
||||
name="Humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(
|
||||
clusters.RelativeHumidityMeasurement.Attributes.MeasuredValue,
|
||||
),
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
measurement_to_ha=lambda x: x / 100,
|
||||
),
|
||||
device_types.LightSensor: MatterSensorEntityDescriptionFactory(
|
||||
key=device_types.LightSensor,
|
||||
name="Light",
|
||||
measurement_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
|
||||
subscribe_attributes=(
|
||||
clusters.IlluminanceMeasurement.Attributes.MeasuredValue,
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="LightSensor",
|
||||
name="Illuminance",
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
),
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.IlluminanceMeasurement.Attributes.MeasuredValue,),
|
||||
measurement_to_ha=lambda x: round(pow(10, ((x - 1) / 10000)), 1),
|
||||
),
|
||||
}
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SENSOR,
|
||||
entity_description=SensorEntityDescription(
|
||||
key="PowerSource",
|
||||
name="Battery",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
required_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,),
|
||||
# value has double precision
|
||||
measurement_to_ha=lambda x: int(x / 2),
|
||||
),
|
||||
]
|
||||
|
@@ -1,8 +1,6 @@
|
||||
"""Matter switches."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from chip.clusters import Objects as clusters
|
||||
@@ -18,8 +16,9 @@ from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .entity import MatterEntity, MatterEntityDescriptionBaseClass
|
||||
from .entity import MatterEntity
|
||||
from .helpers import get_matter
|
||||
from .models import MatterDiscoverySchema
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -35,21 +34,19 @@ async def async_setup_entry(
|
||||
class MatterSwitch(MatterEntity, SwitchEntity):
|
||||
"""Representation of a Matter switch."""
|
||||
|
||||
entity_description: MatterSwitchEntityDescription
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn switch on."""
|
||||
await self.matter_client.send_device_command(
|
||||
node_id=self._device_type_instance.node.node_id,
|
||||
endpoint_id=self._device_type_instance.endpoint_id,
|
||||
node_id=self._endpoint.node.node_id,
|
||||
endpoint_id=self._endpoint.endpoint_id,
|
||||
command=clusters.OnOff.Commands.On(),
|
||||
)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn switch off."""
|
||||
await self.matter_client.send_device_command(
|
||||
node_id=self._device_type_instance.node.node_id,
|
||||
endpoint_id=self._device_type_instance.endpoint_id,
|
||||
node_id=self._endpoint.node.node_id,
|
||||
endpoint_id=self._endpoint.endpoint_id,
|
||||
command=clusters.OnOff.Commands.Off(),
|
||||
)
|
||||
|
||||
@@ -57,31 +54,21 @@ class MatterSwitch(MatterEntity, SwitchEntity):
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
self._attr_is_on = self.get_matter_attribute_value(
|
||||
clusters.OnOff.Attributes.OnOff
|
||||
self._entity_info.primary_attribute
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class MatterSwitchEntityDescription(
|
||||
SwitchEntityDescription,
|
||||
MatterEntityDescriptionBaseClass,
|
||||
):
|
||||
"""Matter Switch entity description."""
|
||||
|
||||
|
||||
# You can't set default values on inherited data classes
|
||||
MatterSwitchEntityDescriptionFactory = partial(
|
||||
MatterSwitchEntityDescription, entity_cls=MatterSwitch
|
||||
)
|
||||
|
||||
|
||||
DEVICE_ENTITY: dict[
|
||||
type[device_types.DeviceType],
|
||||
MatterEntityDescriptionBaseClass | list[MatterEntityDescriptionBaseClass],
|
||||
] = {
|
||||
device_types.OnOffPlugInUnit: MatterSwitchEntityDescriptionFactory(
|
||||
key=device_types.OnOffPlugInUnit,
|
||||
subscribe_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
device_class=SwitchDeviceClass.OUTLET,
|
||||
# Discovery schema(s) to map Matter Attributes to HA entities
|
||||
DISCOVERY_SCHEMAS = [
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=SwitchEntityDescription(
|
||||
key="MatterPlug", device_class=SwitchDeviceClass.OUTLET
|
||||
),
|
||||
entity_class=MatterSwitch,
|
||||
required_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
# restrict device type to prevent discovery by light
|
||||
# platform which also uses OnOff cluster
|
||||
not_device_type=(device_types.OnOffLight, device_types.DimmableLight),
|
||||
),
|
||||
}
|
||||
]
|
||||
|
@@ -21,5 +21,5 @@
  "documentation": "https://www.home-assistant.io/integrations/mopeka",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["mopeka_iot_ble==0.4.0"]
  "requirements": ["mopeka_iot_ble==0.4.1"]
}
@@ -8,11 +8,11 @@ from datetime import timedelta
|
||||
from functools import cached_property
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
from nibe.connection import Connection
|
||||
from nibe.connection.modbus import Modbus
|
||||
from nibe.connection.nibegw import NibeGW, ProductInfo
|
||||
from nibe.exceptions import CoilNotFoundException, CoilReadException
|
||||
from nibe.exceptions import CoilNotFoundException, ReadException
|
||||
from nibe.heatpump import HeatPump, Model, Series
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -182,7 +182,7 @@ class ContextCoordinator(
|
||||
return release_update
|
||||
|
||||
|
||||
class Coordinator(ContextCoordinator[dict[int, Coil], int]):
|
||||
class Coordinator(ContextCoordinator[dict[int, CoilData], int]):
|
||||
"""Update coordinator for nibe heat pumps."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
@@ -199,17 +199,18 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
|
||||
)
|
||||
|
||||
self.data = {}
|
||||
self.seed: dict[int, Coil] = {}
|
||||
self.seed: dict[int, CoilData] = {}
|
||||
self.connection = connection
|
||||
self.heatpump = heatpump
|
||||
self.task: asyncio.Task | None = None
|
||||
|
||||
heatpump.subscribe(heatpump.COIL_UPDATE_EVENT, self._on_coil_update)
|
||||
|
||||
def _on_coil_update(self, coil: Coil):
|
||||
def _on_coil_update(self, data: CoilData):
|
||||
"""Handle callback on coil updates."""
|
||||
self.data[coil.address] = coil
|
||||
self.seed[coil.address] = coil
|
||||
coil = data.coil
|
||||
self.data[coil.address] = data
|
||||
self.seed[coil.address] = data
|
||||
self.async_update_context_listeners([coil.address])
|
||||
|
||||
@property
|
||||
@@ -246,26 +247,26 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
|
||||
|
||||
async def async_write_coil(self, coil: Coil, value: int | float | str) -> None:
|
||||
"""Write coil and update state."""
|
||||
coil.value = value
|
||||
coil = await self.connection.write_coil(coil)
|
||||
data = CoilData(coil, value)
|
||||
await self.connection.write_coil(data)
|
||||
|
||||
self.data[coil.address] = coil
|
||||
self.data[coil.address] = data
|
||||
|
||||
self.async_update_context_listeners([coil.address])
|
||||
|
||||
async def async_read_coil(self, coil: Coil) -> Coil:
|
||||
async def async_read_coil(self, coil: Coil) -> CoilData:
|
||||
"""Read coil and update state using callbacks."""
|
||||
return await self.connection.read_coil(coil)
|
||||
|
||||
async def _async_update_data(self) -> dict[int, Coil]:
|
||||
async def _async_update_data(self) -> dict[int, CoilData]:
|
||||
self.task = asyncio.current_task()
|
||||
try:
|
||||
return await self._async_update_data_internal()
|
||||
finally:
|
||||
self.task = None
|
||||
|
||||
async def _async_update_data_internal(self) -> dict[int, Coil]:
|
||||
result: dict[int, Coil] = {}
|
||||
async def _async_update_data_internal(self) -> dict[int, CoilData]:
|
||||
result: dict[int, CoilData] = {}
|
||||
|
||||
def _get_coils() -> Iterable[Coil]:
|
||||
for address in sorted(self.context_callbacks.keys()):
|
||||
@@ -282,10 +283,10 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
|
||||
yield coil
|
||||
|
||||
try:
|
||||
async for coil in self.connection.read_coils(_get_coils()):
|
||||
result[coil.address] = coil
|
||||
self.seed.pop(coil.address, None)
|
||||
except CoilReadException as exception:
|
||||
async for data in self.connection.read_coils(_get_coils()):
|
||||
result[data.coil.address] = data
|
||||
self.seed.pop(data.coil.address, None)
|
||||
except ReadException as exception:
|
||||
if not result:
|
||||
raise UpdateFailed(f"Failed to update: {exception}") from exception
|
||||
self.logger.debug(
|
||||
@@ -329,7 +330,7 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
|
||||
self.coordinator.data or {}
|
||||
)
|
||||
|
||||
def _async_read_coil(self, coil: Coil):
|
||||
def _async_read_coil(self, data: CoilData):
|
||||
"""Update state of entity based on coil data."""
|
||||
|
||||
async def _async_write_coil(self, value: int | float | str):
|
||||
@@ -337,10 +338,9 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
|
||||
await self.coordinator.async_write_coil(self._coil, value)
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
coil = self.coordinator.data.get(self._coil.address)
|
||||
if coil is None:
|
||||
data = self.coordinator.data.get(self._coil.address)
|
||||
if data is None:
|
||||
return
|
||||
|
||||
self._coil = coil
|
||||
self._async_read_coil(coil)
|
||||
self._async_read_coil(data)
|
||||
self.async_write_ha_state()
|
||||
|
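A minimal sketch of the nibe 2.x data shape this coordinator migration targets, matching how the diff above uses it (CoilData pairing the static Coil with its value); anything beyond the coil and value fields is an assumption.

```python
from nibe.coil import Coil, CoilData


def current_value(
    coordinator_data: dict[int, CoilData], coil: Coil
) -> int | float | str | None:
    """Resolve an entity's reading from coordinator data keyed by coil address."""
    data = coordinator_data.get(coil.address)
    # data.coil carries the register definition, data.value the actual reading
    return None if data is None else data.value
```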
@@ -1,7 +1,7 @@
|
||||
"""The Nibe Heat Pump binary sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
|
||||
from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -37,5 +37,5 @@ class BinarySensor(CoilEntity, BinarySensorEntity):
|
||||
"""Initialize entity."""
|
||||
super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
|
||||
|
||||
def _async_read_coil(self, coil: Coil) -> None:
|
||||
self._attr_is_on = coil.value == "ON"
|
||||
def _async_read_coil(self, data: CoilData) -> None:
|
||||
self._attr_is_on = data.value == "ON"
|
||||
|
@@ -8,10 +8,10 @@ from nibe.connection.nibegw import NibeGW
|
||||
from nibe.exceptions import (
|
||||
AddressInUseException,
|
||||
CoilNotFoundException,
|
||||
CoilReadException,
|
||||
CoilReadSendException,
|
||||
CoilWriteException,
|
||||
CoilWriteSendException,
|
||||
ReadException,
|
||||
ReadSendException,
|
||||
WriteException,
|
||||
)
|
||||
from nibe.heatpump import HeatPump, Model
|
||||
import voluptuous as vol
|
||||
@@ -108,13 +108,13 @@ async def validate_nibegw_input(
|
||||
|
||||
try:
|
||||
await connection.verify_connectivity()
|
||||
except (CoilReadSendException, CoilWriteSendException) as exception:
|
||||
except (ReadSendException, CoilWriteSendException) as exception:
|
||||
raise FieldError(str(exception), CONF_IP_ADDRESS, "address") from exception
|
||||
except CoilNotFoundException as exception:
|
||||
raise FieldError("Coils not found", "base", "model") from exception
|
||||
except CoilReadException as exception:
|
||||
except ReadException as exception:
|
||||
raise FieldError("Timeout on read from pump", "base", "read") from exception
|
||||
except CoilWriteException as exception:
|
||||
except WriteException as exception:
|
||||
raise FieldError("Timeout on writing to pump", "base", "write") from exception
|
||||
finally:
|
||||
await connection.stop()
|
||||
@@ -147,13 +147,13 @@ async def validate_modbus_input(
|
||||
|
||||
try:
|
||||
await connection.verify_connectivity()
|
||||
except (CoilReadSendException, CoilWriteSendException) as exception:
|
||||
except (ReadSendException, CoilWriteSendException) as exception:
|
||||
raise FieldError(str(exception), CONF_MODBUS_URL, "address") from exception
|
||||
except CoilNotFoundException as exception:
|
||||
raise FieldError("Coils not found", "base", "model") from exception
|
||||
except CoilReadException as exception:
|
||||
except ReadException as exception:
|
||||
raise FieldError("Timeout on read from pump", "base", "read") from exception
|
||||
except CoilWriteException as exception:
|
||||
except WriteException as exception:
|
||||
raise FieldError("Timeout on writing to pump", "base", "write") from exception
|
||||
finally:
|
||||
await connection.stop()
|
||||
|
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
  "iot_class": "local_polling",
  "requirements": ["nibe==1.6.0"]
  "requirements": ["nibe==2.0.0"]
}
@@ -1,7 +1,7 @@
|
||||
"""The Nibe Heat Pump numbers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
|
||||
from homeassistant.components.number import ENTITY_ID_FORMAT, NumberEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -58,13 +58,13 @@ class Number(CoilEntity, NumberEntity):
|
||||
self._attr_native_unit_of_measurement = coil.unit
|
||||
self._attr_native_value = None
|
||||
|
||||
def _async_read_coil(self, coil: Coil) -> None:
|
||||
if coil.value is None:
|
||||
def _async_read_coil(self, data: CoilData) -> None:
|
||||
if data.value is None:
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
try:
|
||||
self._attr_native_value = float(coil.value)
|
||||
self._attr_native_value = float(data.value)
|
||||
except ValueError:
|
||||
self._attr_native_value = None
|
||||
|
||||
|
@@ -1,7 +1,7 @@
|
||||
"""The Nibe Heat Pump select."""
|
||||
from __future__ import annotations
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
|
||||
from homeassistant.components.select import ENTITY_ID_FORMAT, SelectEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -40,12 +40,12 @@ class Select(CoilEntity, SelectEntity):
|
||||
self._attr_options = list(coil.mappings.values())
|
||||
self._attr_current_option = None
|
||||
|
||||
def _async_read_coil(self, coil: Coil) -> None:
|
||||
if not isinstance(coil.value, str):
|
||||
def _async_read_coil(self, data: CoilData) -> None:
|
||||
if not isinstance(data.value, str):
|
||||
self._attr_current_option = None
|
||||
return
|
||||
|
||||
self._attr_current_option = coil.value
|
||||
self._attr_current_option = data.value
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Support writing value."""
|
||||
|
@@ -1,7 +1,7 @@
|
||||
"""The Nibe Heat Pump sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
ENTITY_ID_FORMAT,
|
||||
@@ -146,5 +146,5 @@ class Sensor(CoilEntity, SensorEntity):
|
||||
self._attr_native_unit_of_measurement = coil.unit
|
||||
self._attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def _async_read_coil(self, coil: Coil):
|
||||
self._attr_native_value = coil.value
|
||||
def _async_read_coil(self, data: CoilData):
|
||||
self._attr_native_value = data.value
|
||||
|
@@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
|
||||
from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -40,8 +40,8 @@ class Switch(CoilEntity, SwitchEntity):
|
||||
super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
|
||||
self._attr_is_on = None
|
||||
|
||||
def _async_read_coil(self, coil: Coil) -> None:
|
||||
self._attr_is_on = coil.value == "ON"
|
||||
def _async_read_coil(self, data: CoilData) -> None:
|
||||
self._attr_is_on = data.value == "ON"
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
|
@@ -1 +1,18 @@
"""The Obihai integration."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import PLATFORMS


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up from a config entry."""
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/obihai/config_flow.py (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
"""Config flow to configure the Obihai integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from .connectivity import validate_auth
|
||||
from .const import DEFAULT_PASSWORD, DEFAULT_USERNAME, DOMAIN
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=DEFAULT_USERNAME,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=DEFAULT_PASSWORD,
|
||||
): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class ObihaiFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Obihai."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
if await self.hass.async_add_executor_job(
|
||||
validate_auth,
|
||||
user_input[CONF_HOST],
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
):
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_HOST],
|
||||
data=user_input,
|
||||
)
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
data_schema = self.add_suggested_values_to_schema(DATA_SCHEMA, user_input)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
errors=errors,
|
||||
data_schema=data_schema,
|
||||
)
|
||||
|
||||
# DEPRECATED
|
||||
async def async_step_import(self, config: dict[str, Any]) -> FlowResult:
|
||||
"""Handle a flow initialized by importing a config."""
|
||||
self._async_abort_entries_match({CONF_HOST: config[CONF_HOST]})
|
||||
return self.async_create_entry(
|
||||
title=config.get(CONF_NAME, config[CONF_HOST]),
|
||||
data={
|
||||
CONF_HOST: config[CONF_HOST],
|
||||
CONF_PASSWORD: config[CONF_PASSWORD],
|
||||
CONF_USERNAME: config[CONF_USERNAME],
|
||||
},
|
||||
)
|
homeassistant/components/obihai/connectivity.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
"""Support for Obihai Connectivity."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pyobihai import PyObihai
|
||||
|
||||
from .const import DEFAULT_PASSWORD, DEFAULT_USERNAME, LOGGER
|
||||
|
||||
|
||||
def get_pyobihai(
|
||||
host: str,
|
||||
username: str,
|
||||
password: str,
|
||||
) -> PyObihai:
|
||||
"""Retrieve an authenticated PyObihai."""
|
||||
return PyObihai(host, username, password)
|
||||
|
||||
|
||||
def validate_auth(
|
||||
host: str,
|
||||
username: str,
|
||||
password: str,
|
||||
) -> bool:
|
||||
"""Test if the given setting works as expected."""
|
||||
obi = get_pyobihai(host, username, password)
|
||||
|
||||
login = obi.check_account()
|
||||
if not login:
|
||||
LOGGER.debug("Invalid credentials")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class ObihaiConnection:
|
||||
"""Contains a list of Obihai Sensors."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
host: str,
|
||||
username: str = DEFAULT_USERNAME,
|
||||
password: str = DEFAULT_PASSWORD,
|
||||
) -> None:
|
||||
"""Store configuration."""
|
||||
self.sensors: list = []
|
||||
self.host = host
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.serial: list = []
|
||||
self.services: list = []
|
||||
self.line_services: list = []
|
||||
self.call_direction: list = []
|
||||
self.pyobihai: PyObihai = None
|
||||
|
||||
def update(self) -> bool:
|
||||
"""Validate connection and retrieve a list of sensors."""
|
||||
if not self.pyobihai:
|
||||
self.pyobihai = get_pyobihai(self.host, self.username, self.password)
|
||||
|
||||
if not self.pyobihai.check_account():
|
||||
return False
|
||||
|
||||
self.serial = self.pyobihai.get_device_serial()
|
||||
self.services = self.pyobihai.get_state()
|
||||
self.line_services = self.pyobihai.get_line_state()
|
||||
self.call_direction = self.pyobihai.get_call_direction()
|
||||
|
||||
return True
|
homeassistant/components/obihai/const.py (new file)
@@ -0,0 +1,15 @@
"""Constants for the Obihai integration."""

import logging
from typing import Final

from homeassistant.const import Platform

DOMAIN: Final = "obihai"
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "admin"
OBIHAI = "Obihai"

LOGGER = logging.getLogger(__package__)

PLATFORMS: Final = [Platform.SENSOR]
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"domain": "obihai",
|
||||
"name": "Obihai",
|
||||
"codeowners": ["@dshokouhi"],
|
||||
"codeowners": ["@dshokouhi", "@ejpenney"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/obihai",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyobihai"],
|
||||
|
@@ -2,9 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyobihai import PyObihai
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -12,20 +10,19 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import issue_registry
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .connectivity import ObihaiConnection
|
||||
from .const import DEFAULT_PASSWORD, DEFAULT_USERNAME, DOMAIN, OBIHAI
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=5)
|
||||
|
||||
OBIHAI = "Obihai"
|
||||
DEFAULT_USERNAME = "admin"
|
||||
DEFAULT_PASSWORD = "admin"
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
@@ -35,46 +32,58 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
# DEPRECATED
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Obihai sensor platform."""
|
||||
issue_registry.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"manual_migration",
|
||||
breaks_in_ha_version="2023.6.0",
|
||||
is_fixable=False,
|
||||
severity=issue_registry.IssueSeverity.ERROR,
|
||||
translation_key="manual_migration",
|
||||
)
|
||||
|
||||
username = config[CONF_USERNAME]
|
||||
password = config[CONF_PASSWORD]
|
||||
host = config[CONF_HOST]
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=config,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Set up the Obihai sensor entries."""
|
||||
|
||||
username = entry.data[CONF_USERNAME]
|
||||
password = entry.data[CONF_PASSWORD]
|
||||
host = entry.data[CONF_HOST]
|
||||
requester = ObihaiConnection(host, username, password)
|
||||
|
||||
await hass.async_add_executor_job(requester.update)
|
||||
sensors = []
|
||||
for key in requester.services:
|
||||
sensors.append(ObihaiServiceSensors(requester.pyobihai, requester.serial, key))
|
||||
|
||||
pyobihai = PyObihai(host, username, password)
|
||||
if requester.line_services is not None:
|
||||
for key in requester.line_services:
|
||||
sensors.append(
|
||||
ObihaiServiceSensors(requester.pyobihai, requester.serial, key)
|
||||
)
|
||||
|
||||
login = pyobihai.check_account()
|
||||
if not login:
|
||||
_LOGGER.error("Invalid credentials")
|
||||
return
|
||||
for key in requester.call_direction:
|
||||
sensors.append(ObihaiServiceSensors(requester.pyobihai, requester.serial, key))
|
||||
|
||||
serial = pyobihai.get_device_serial()
|
||||
|
||||
services = pyobihai.get_state()
|
||||
|
||||
line_services = pyobihai.get_line_state()
|
||||
|
||||
call_direction = pyobihai.get_call_direction()
|
||||
|
||||
for key in services:
|
||||
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
|
||||
|
||||
if line_services is not None:
|
||||
for key in line_services:
|
||||
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
|
||||
|
||||
for key in call_direction:
|
||||
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
|
||||
|
||||
add_entities(sensors)
|
||||
async_add_entities(sensors, update_before_add=True)
|
||||
|
||||
|
||||
class ObihaiServiceSensors(SensorEntity):
|
||||
@@ -148,6 +157,10 @@ class ObihaiServiceSensors(SensorEntity):
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the sensor."""
|
||||
if not self._pyobihai.check_account():
|
||||
self._state = None
|
||||
return
|
||||
|
||||
services = self._pyobihai.get_state()
|
||||
|
||||
if self._service_name in services:
|
||||
|
homeassistant/components/obihai/strings.json (new file)
@@ -0,0 +1,25 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "issues": {
    "manual_migration": {
      "title": "Manual migration required for Obihai",
      "description": "Configuration of the Obihai platform in YAML is deprecated and will be removed in Home Assistant 2023.6; Your existing configuration has been imported into the UI automatically and can be safely removed from your configuration.yaml file."
    }
  }
}
@@ -3,6 +3,7 @@ import asyncio
|
||||
from datetime import date, datetime
|
||||
import logging
|
||||
|
||||
import async_timeout
|
||||
import pyotgw
|
||||
import pyotgw.vars as gw_vars
|
||||
from serial import SerialException
|
||||
@@ -112,10 +113,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
config_entry.add_update_listener(options_updated)
|
||||
|
||||
try:
|
||||
await asyncio.wait_for(
|
||||
gateway.connect_and_subscribe(),
|
||||
timeout=CONNECTION_TIMEOUT,
|
||||
)
|
||||
async with async_timeout.timeout(CONNECTION_TIMEOUT):
|
||||
await gateway.connect_and_subscribe()
|
||||
except (asyncio.TimeoutError, ConnectionError, SerialException) as ex:
|
||||
await gateway.cleanup()
|
||||
raise ConfigEntryNotReady(
|
||||
|
@@ -3,6 +3,7 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
import async_timeout
|
||||
import pyotgw
|
||||
from pyotgw import vars as gw_vars
|
||||
from serial import SerialException
|
||||
@@ -68,10 +69,8 @@ class OpenThermGwConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
return status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT)
|
||||
|
||||
try:
|
||||
await asyncio.wait_for(
|
||||
test_connection(),
|
||||
timeout=CONNECTION_TIMEOUT,
|
||||
)
|
||||
async with async_timeout.timeout(CONNECTION_TIMEOUT):
|
||||
await test_connection()
|
||||
except asyncio.TimeoutError:
|
||||
return self._show_form({"base": "timeout_connect"})
|
||||
except (ConnectionError, SerialException):
|
||||
|
@@ -1,11 +1,13 @@
"""The Open Thread Border Router integration."""
from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine
import dataclasses
from functools import wraps
from typing import Any, Concatenate, ParamSpec, TypeVar

import aiohttp
import python_otbr_api

from homeassistant.components.thread import async_add_dataset
@@ -44,11 +46,23 @@ class OTBRData:
url: str
api: python_otbr_api.OTBR

@_handle_otbr_error
async def set_enabled(self, enabled: bool) -> None:
"""Enable or disable the router."""
return await self.api.set_enabled(enabled)

@_handle_otbr_error
async def get_active_dataset_tlvs(self) -> bytes | None:
"""Get current active operational dataset in TLVS format, or None."""
return await self.api.get_active_dataset_tlvs()

@_handle_otbr_error
async def create_active_dataset(
self, dataset: python_otbr_api.OperationalDataSet
) -> None:
"""Create an active operational dataset."""
return await self.api.create_active_dataset(dataset)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Open Thread Border Router component."""
@@ -63,8 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
otbrdata = OTBRData(entry.data["url"], api)
try:
dataset = await otbrdata.get_active_dataset_tlvs()
except HomeAssistantError as err:
raise ConfigEntryNotReady from err
except (
HomeAssistantError,
aiohttp.ClientError,
asyncio.TimeoutError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
if dataset:
await async_add_dataset(hass, entry.title, dataset.hex())
@@ -1,8 +1,10 @@
"""Config flow for the Open Thread Border Router integration."""
from __future__ import annotations

import asyncio
import logging

import aiohttp
import python_otbr_api
import voluptuous as vol

@@ -48,7 +50,11 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN):
url = user_input[CONF_URL]
try:
await self._connect_and_create_dataset(url)
except python_otbr_api.OTBRError:
except (
python_otbr_api.OTBRError,
aiohttp.ClientError,
asyncio.TimeoutError,
):
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(DOMAIN)
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/otbr",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["python-otbr-api==1.0.3"]
"requirements": ["python-otbr-api==1.0.4"]
}
@@ -1,6 +1,8 @@
"""Websocket API for OTBR."""
from typing import TYPE_CHECKING

import python_otbr_api

from homeassistant.components.websocket_api import (
ActiveConnection,
async_register_command,
@@ -20,6 +22,7 @@ if TYPE_CHECKING:
def async_setup(hass: HomeAssistant) -> None:
"""Set up the OTBR Websocket API."""
async_register_command(hass, websocket_info)
async_register_command(hass, websocket_create_network)


@websocket_command(
@@ -51,3 +54,42 @@ async def websocket_info(
"active_dataset_tlvs": dataset.hex() if dataset else None,
},
)


@websocket_command(
{
"type": "otbr/create_network",
}
)
@async_response
async def websocket_create_network(
hass: HomeAssistant, connection: ActiveConnection, msg: dict
) -> None:
"""Create a new Thread network."""
if DOMAIN not in hass.data:
connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded")
return

data: OTBRData = hass.data[DOMAIN]

try:
await data.set_enabled(False)
except HomeAssistantError as exc:
connection.send_error(msg["id"], "set_enabled_failed", str(exc))
return

try:
await data.create_active_dataset(
python_otbr_api.OperationalDataSet(network_name="home-assistant")
)
except HomeAssistantError as exc:
connection.send_error(msg["id"], "create_active_dataset_failed", str(exc))
return

try:
await data.set_enabled(True)
except HomeAssistantError as exc:
connection.send_error(msg["id"], "set_enabled_failed", str(exc))
return

connection.send_result(msg["id"])
@@ -10,6 +10,7 @@ from .atlantic_heat_recovery_ventilation import AtlanticHeatRecoveryVentilation
from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone
from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl
from .somfy_thermostat import SomfyThermostat
from .valve_heating_temperature_interface import ValveHeatingTemperatureInterface

WIDGET_TO_CLIMATE_ENTITY = {
UIWidget.ATLANTIC_ELECTRICAL_HEATER: AtlanticElectricalHeater,
@@ -21,4 +22,5 @@ WIDGET_TO_CLIMATE_ENTITY = {
UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: AtlanticPassAPCHeatingZone,
UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: AtlanticPassAPCZoneControl,
UIWidget.SOMFY_THERMOSTAT: SomfyThermostat,
UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface,
}
@@ -15,6 +15,7 @@ from homeassistant.components.climate import (
)
from homeassistant.const import UnitOfTemperature

from ..const import DOMAIN
from ..entity import OverkizEntity

PRESET_COMFORT1 = "comfort-1"
@@ -47,6 +48,7 @@ class AtlanticElectricalHeater(OverkizEntity, ClimateEntity):
_attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ]
_attr_supported_features = ClimateEntityFeature.PRESET_MODE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = DOMAIN

@property
def hvac_mode(self) -> HVACMode:
@@ -16,6 +16,7 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

@@ -70,6 +71,7 @@ class AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint(
_attr_supported_features = (
ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE
)
_attr_translation_key = DOMAIN

def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
@@ -14,6 +14,7 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

@@ -43,6 +44,7 @@ class AtlanticElectricalTowelDryer(OverkizEntity, ClimateEntity):
_attr_hvac_modes = [*HVAC_MODE_TO_OVERKIZ]
_attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ]
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = DOMAIN

def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
@@ -13,6 +13,7 @@ from homeassistant.components.climate import (
)
from homeassistant.const import UnitOfTemperature

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

@@ -49,6 +50,7 @@ class AtlanticHeatRecoveryVentilation(OverkizEntity, ClimateEntity):
_attr_supported_features = (
ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.FAN_MODE
)
_attr_translation_key = DOMAIN

def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
@@ -17,6 +17,7 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

@@ -78,6 +79,7 @@ class AtlanticPassAPCHeatingZone(OverkizEntity, ClimateEntity):
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = DOMAIN

def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
@@ -15,19 +15,17 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

PRESET_FREEZE = "freeze"
PRESET_NIGHT = "night"

STATE_DEROGATION_ACTIVE = "active"
STATE_DEROGATION_INACTIVE = "inactive"


OVERKIZ_TO_HVAC_MODES: dict[str, HVACMode] = {
STATE_DEROGATION_ACTIVE: HVACMode.HEAT,
STATE_DEROGATION_INACTIVE: HVACMode.AUTO,
OverkizCommandParam.ACTIVE: HVACMode.HEAT,
OverkizCommandParam.INACTIVE: HVACMode.AUTO,
}
HVAC_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_HVAC_MODES.items()}

@@ -60,6 +58,8 @@ class SomfyThermostat(OverkizEntity, ClimateEntity):
)
_attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ]
_attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ]
_attr_translation_key = DOMAIN

# Both min and max temp values have been retrieved from the Somfy Application.
_attr_min_temp = 15.0
_attr_max_temp = 26.0
@@ -0,0 +1,137 @@
"""Support for ValveHeatingTemperatureInterface."""
from __future__ import annotations

from typing import Any, cast

from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState

from homeassistant.components.climate import (
PRESET_AWAY,
PRESET_COMFORT,
PRESET_ECO,
PRESET_NONE,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
UnitOfTemperature,
)
from homeassistant.const import ATTR_TEMPERATURE

from ..const import DOMAIN
from ..coordinator import OverkizDataUpdateCoordinator
from ..entity import OverkizEntity

PRESET_MANUAL = "manual"
PRESET_FROST_PROTECTION = "frost_protection"

OVERKIZ_TO_HVAC_ACTION: dict[str, HVACAction] = {
OverkizCommandParam.OPEN: HVACAction.HEATING,
OverkizCommandParam.CLOSED: HVACAction.IDLE,
}

OVERKIZ_TO_PRESET_MODE: dict[str, str] = {
OverkizCommandParam.GEOFENCING_MODE: PRESET_NONE,
OverkizCommandParam.SUDDEN_DROP_MODE: PRESET_NONE,
OverkizCommandParam.AWAY: PRESET_AWAY,
OverkizCommandParam.COMFORT: PRESET_COMFORT,
OverkizCommandParam.ECO: PRESET_ECO,
OverkizCommandParam.FROSTPROTECTION: PRESET_FROST_PROTECTION,
OverkizCommandParam.MANUAL: PRESET_MANUAL,
}
PRESET_MODE_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_PRESET_MODE.items()}

TEMPERATURE_SENSOR_DEVICE_INDEX = 2


class ValveHeatingTemperatureInterface(OverkizEntity, ClimateEntity):
"""Representation of Valve Heating Temperature Interface device."""

_attr_hvac_mode = HVACMode.HEAT
_attr_hvac_modes = [HVACMode.HEAT]
_attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ]
_attr_supported_features = (
ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = DOMAIN

def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
) -> None:
"""Init method."""
super().__init__(device_url, coordinator)
self.temperature_device = self.executor.linked_device(
TEMPERATURE_SENSOR_DEVICE_INDEX
)

self._attr_min_temp = cast(
float, self.executor.select_state(OverkizState.CORE_MIN_SETPOINT)
)
self._attr_max_temp = cast(
float, self.executor.select_state(OverkizState.CORE_MAX_SETPOINT)
)

@property
def hvac_action(self) -> str:
"""Return the current running hvac operation."""
return OVERKIZ_TO_HVAC_ACTION[
cast(str, self.executor.select_state(OverkizState.CORE_OPEN_CLOSED_VALVE))
]

@property
def target_temperature(self) -> float:
"""Return the temperature."""
return cast(
float, self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE)
)

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if temperature := self.temperature_device.states[OverkizState.CORE_TEMPERATURE]:
return temperature.value_as_float

return None

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new temperature."""
temperature = kwargs[ATTR_TEMPERATURE]

await self.executor.async_execute_command(
OverkizCommand.SET_DEROGATION,
float(temperature),
OverkizCommandParam.FURTHER_NOTICE,
)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
return

@property
def preset_mode(self) -> str:
"""Return the current preset mode, e.g., home, away, temp."""
return OVERKIZ_TO_PRESET_MODE[
cast(
str, self.executor.select_state(OverkizState.IO_DEROGATION_HEATING_MODE)
)
]

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""

# If we want to switch to manual mode via a preset, we need to pass in a temperature
# Manual mode will be on automatically if an user sets a temperature
if preset_mode == PRESET_MANUAL:
if current_temperature := self.current_temperature:
await self.executor.async_execute_command(
OverkizCommand.SET_DEROGATION,
current_temperature,
OverkizCommandParam.FURTHER_NOTICE,
)
else:
await self.executor.async_execute_command(
OverkizCommand.SET_DEROGATION,
PRESET_MODE_TO_OVERKIZ[preset_mode],
OverkizCommandParam.FURTHER_NOTICE,
)
@@ -83,6 +83,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = {
UIWidget.STATEFUL_ALARM_CONTROLLER: Platform.ALARM_CONTROL_PANEL, # widgetName, uiClass is Alarm (not supported)
UIWidget.STATELESS_EXTERIOR_HEATING: Platform.SWITCH, # widgetName, uiClass is ExteriorHeatingSystem (not supported)
UIWidget.TSKALARM_CONTROLLER: Platform.ALARM_CONTROL_PANEL, # widgetName, uiClass is Alarm (not supported)
UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported)
}

# Map Overkiz camelCase to Home Assistant snake_case for translation
@@ -13,7 +13,7 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
"requirements": ["pyoverkiz==1.7.3"],
"requirements": ["pyoverkiz==1.7.6"],
"zeroconf": [
{
"type": "_kizbox._tcp.local.",
@@ -28,6 +28,34 @@
}
},
"entity": {
"climate": {
"overkiz": {
"state_attributes": {
"preset_mode": {
"state": {
"auto": "Auto",
"comfort-1": "Comfort 1",
"comfort-2": "Comfort 2",
"drying": "Drying",
"external": "External",
"freeze": "Freeze",
"frost_protection": "Frost protection",
"manual": "Manual",
"night": "Night",
"prog": "Prog"
}
},
"fan_mode": {
"state": {
"away": "Away",
"bypass_boost": "Bypass boost",
"home_boost": "Home boost",
"kitchen_boost": "Kitchen boost"
}
}
}
}
},
"select": {
"open_closed_pedestrian": {
"state": {
@@ -8,6 +8,7 @@ import logging
import re
from typing import Any

import async_timeout
from icmplib import NameLookupError, async_ping
import voluptuous as vol

@@ -230,9 +231,8 @@ class PingDataSubProcess(PingData):
close_fds=False, # required for posix_spawn
)
try:
out_data, out_error = await asyncio.wait_for(
pinger.communicate(), self._count + PING_TIMEOUT
)
async with async_timeout.timeout(self._count + PING_TIMEOUT):
out_data, out_error = await pinger.communicate()

if out_data:
_LOGGER.debug(
@@ -17,6 +17,7 @@ from homeassistant.const import (
UnitOfPower,
UnitOfPressure,
UnitOfTemperature,
UnitOfTime,
UnitOfVolume,
)
from homeassistant.core import HomeAssistant
@@ -303,9 +304,9 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="gas_consumed_interval",
name="Gas consumed interval",
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
device_class=SensorDeviceClass.GAS,
state_class=SensorStateClass.TOTAL,
icon="mdi:meter-gas",
native_unit_of_measurement=f"{UnitOfVolume.CUBIC_METERS}/{UnitOfTime.HOURS}",
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="gas_consumed_cumulative",
Some files were not shown because too many files have changed in this diff.