Mirror of https://github.com/home-assistant/core.git, synced 2025-09-23 03:49:31 +00:00

Compare commits: 37 commits (mqtt-entit... → condition_...)
Commits:

1c4baa8dca, 74660da2d2, 6b8c180509, eb4a873c43, 6aafa666d6, 9ee9bb368d,
6e4258c8a9, d65e704823, aadaf87c16, e70b147c0c, 031b12752f, df0cfd69a9,
b2c53f2d78, 3649e949b1, de7e2303a7, 892f3f267b, 0254285285, 44a95242dc,
f9b1c52d65, aa8d78622c, ca6289a576, 0f372f4b47, 4bba167ab3, 962c0c443d,
c6b4cac28a, 3c7e3a5e30, fa698956c3, 32f136b12f, e1f617df25, 84f1b8a5cc,
e9cedf4852, 9c72b40ab4, 65f655e5f5, af28573894, c5fc1de3df, 1df1144eb9,
d51c0e3752
.github/workflows/builder.yml (4 changes, vendored)

@@ -198,7 +198,7 @@ jobs:

       # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             $BUILD_ARGS \
@@ -265,7 +265,7 @@ jobs:

       # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             $BUILD_ARGS \
CODEOWNERS (4 changes, generated)

@@ -1350,6 +1350,8 @@ build.json @home-assistant/supervisor
 /tests/components/samsungtv/ @chemelli74 @epenet
 /homeassistant/components/sanix/ @tomaszsluszniak
 /tests/components/sanix/ @tomaszsluszniak
+/homeassistant/components/satel_integra/ @Tommatheussen
+/tests/components/satel_integra/ @Tommatheussen
 /homeassistant/components/scene/ @home-assistant/core
 /tests/components/scene/ @home-assistant/core
 /homeassistant/components/schedule/ @home-assistant/core
@@ -1677,6 +1679,8 @@ build.json @home-assistant/supervisor
 /tests/components/uptime_kuma/ @tr4nt0r
 /homeassistant/components/uptimerobot/ @ludeeus @chemelli74
 /tests/components/uptimerobot/ @ludeeus @chemelli74
+/homeassistant/components/usage_prediction/ @home-assistant/core
+/tests/components/usage_prediction/ @home-assistant/core
 /homeassistant/components/usb/ @bdraco
 /tests/components/usb/ @bdraco
 /homeassistant/components/usgs_earthquakes_feed/ @exxamalte
@@ -8,7 +8,7 @@ import threading
 from typing import IO, cast

 from aiohttp import BodyPartReader
-from aiohttp.hdrs import CONTENT_DISPOSITION
+from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE
 from aiohttp.web import FileResponse, Request, Response, StreamResponse
 from multidict import istr

@@ -76,7 +76,8 @@ class DownloadBackupView(HomeAssistantView):
             return Response(status=HTTPStatus.NOT_FOUND)

         headers = {
-            CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
+            CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar",
+            CONTENT_TYPE: "application/x-tar",
         }

         try:
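The change above adds an explicit `Content-Type` alongside the existing `Content-Disposition` when a backup tar is downloaded. Below is a minimal standalone aiohttp sketch of the same header pattern, not the Home Assistant view itself; the file path and route are illustrative assumptions.

```python
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE


async def download_backup(request: web.Request) -> web.FileResponse:
    """Send a backup archive as a named attachment with the right MIME type."""
    return web.FileResponse(
        path="/backups/backup.tar",  # hypothetical location
        headers={
            CONTENT_DISPOSITION: "attachment; filename=backup.tar",
            CONTENT_TYPE: "application/x-tar",  # the header this change adds
        },
    )


app = web.Application()
app.router.add_get("/download", download_backup)
```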
@@ -19,6 +19,7 @@
     "ssdp",
     "stream",
     "sun",
+    "usage_prediction",
     "usb",
     "webhook",
     "zeroconf"
@@ -43,3 +43,5 @@ class DelugeSensorType(enum.StrEnum):
     UPLOAD_SPEED_SENSOR = "upload_speed"
     PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR = "protocol_traffic_upload_speed"
     PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR = "protocol_traffic_download_speed"
+    DOWNLOADING_COUNT_SENSOR = "downloading_count"
+    SEEDING_COUNT_SENSOR = "seeding_count"
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from collections import Counter
 from datetime import timedelta
 from ssl import SSLError
 from typing import Any
@@ -14,11 +15,22 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import LOGGER, DelugeGetSessionStatusKeys
+from .const import LOGGER, DelugeGetSessionStatusKeys, DelugeSensorType

 type DelugeConfigEntry = ConfigEntry[DelugeDataUpdateCoordinator]


+def count_states(data: dict[str, Any]) -> dict[str, int]:
+    """Count the states of the provided torrents."""
+
+    counts = Counter(torrent[b"state"].decode() for torrent in data.values())
+
+    return {
+        DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value: counts.get("Downloading", 0),
+        DelugeSensorType.SEEDING_COUNT_SENSOR.value: counts.get("Seeding", 0),
+    }
+
+
 class DelugeDataUpdateCoordinator(
     DataUpdateCoordinator[dict[Platform, dict[str, Any]]]
 ):
@@ -39,19 +51,22 @@ class DelugeDataUpdateCoordinator(
         )
         self.api = api

-    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
-        """Get the latest data from Deluge and updates the state."""
+    def _get_deluge_data(self):
+        """Get the latest data from Deluge."""
+
         data = {}
         try:
-            _data = await self.hass.async_add_executor_job(
-                self.api.call,
+            data["session_status"] = self.api.call(
                 "core.get_session_status",
                 [iter_member.value for iter_member in list(DelugeGetSessionStatusKeys)],
             )
-            data[Platform.SENSOR] = {k.decode(): v for k, v in _data.items()}
-            data[Platform.SWITCH] = await self.hass.async_add_executor_job(
-                self.api.call, "core.get_torrents_status", {}, ["paused"]
+            data["torrents_status_state"] = self.api.call(
+                "core.get_torrents_status", {}, ["state"]
+            )
+            data["torrents_status_paused"] = self.api.call(
+                "core.get_torrents_status", {}, ["paused"]
             )

         except (
             ConnectionRefusedError,
             TimeoutError,
@@ -66,4 +81,18 @@ class DelugeDataUpdateCoordinator(
             ) from ex
             LOGGER.error("Unknown error connecting to Deluge: %s", ex)
             raise
+
+        return data
+
+    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
+        """Get the latest data from Deluge and updates the state."""
+
+        deluge_data = await self.hass.async_add_executor_job(self._get_deluge_data)
+
+        data = {}
+        data[Platform.SENSOR] = {
+            k.decode(): v for k, v in deluge_data["session_status"].items()
+        }
+        data[Platform.SENSOR].update(count_states(deluge_data["torrents_status_state"]))
+        data[Platform.SWITCH] = deluge_data["torrents_status_paused"]
+        return data
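The refactor above bundles all blocking RPC calls into one executor job and tallies torrent states with `collections.Counter`. A rough standalone sketch of that counting logic follows; Deluge's RPC returns byte-keyed dicts, and the sample data below is made up for illustration.

```python
from collections import Counter
from typing import Any


def count_states(data: dict[str, Any]) -> dict[str, int]:
    """Count how many torrents are downloading or seeding."""
    # Each torrent dict carries a bytes b"state" value such as b"Seeding".
    counts = Counter(torrent[b"state"].decode() for torrent in data.values())
    return {
        "downloading_count": counts.get("Downloading", 0),  # enum values from const.py
        "seeding_count": counts.get("Seeding", 0),
    }


torrents = {
    "a1": {b"state": b"Downloading"},
    "b2": {b"state": b"Seeding"},
    "c3": {b"state": b"Seeding"},
}
print(count_states(torrents))  # {'downloading_count': 1, 'seeding_count': 2}
```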
homeassistant/components/deluge/icons.json (12 lines, new file)

{
  "entity": {
    "sensor": {
      "downloading_count": {
        "default": "mdi:download"
      },
      "seeding_count": {
        "default": "mdi:upload"
      }
    }
  }
}
@@ -110,6 +110,18 @@ SENSOR_TYPES: tuple[DelugeSensorEntityDescription, ...] = (
             data, DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value
         ),
     ),
+    DelugeSensorEntityDescription(
+        key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value,
+        translation_key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value,
+        state_class=SensorStateClass.TOTAL,
+        value=lambda data: data[DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value],
+    ),
+    DelugeSensorEntityDescription(
+        key=DelugeSensorType.SEEDING_COUNT_SENSOR.value,
+        translation_key=DelugeSensorType.SEEDING_COUNT_SENSOR.value,
+        state_class=SensorStateClass.TOTAL,
+        value=lambda data: data[DelugeSensorType.SEEDING_COUNT_SENSOR.value],
+    ),
 )
@@ -36,6 +36,10 @@
         "idle": "[%key:common::state::idle%]"
       }
     },
+    "downloading_count": {
+      "name": "Downloading count",
+      "unit_of_measurement": "torrents"
+    },
     "download_speed": {
       "name": "Download speed"
     },
@@ -45,6 +49,10 @@
     "protocol_traffic_upload_speed": {
       "name": "Protocol traffic upload speed"
     },
+    "seeding_count": {
+      "name": "Seeding count",
+      "unit_of_measurement": "[%key:component::deluge::entity::sensor::downloading_count::unit_of_measurement%]"
+    },
     "upload_speed": {
       "name": "Upload speed"
     }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/emoncms",
   "iot_class": "local_polling",
-  "requirements": ["pyemoncms==0.1.2"]
+  "requirements": ["pyemoncms==0.1.3"]
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/emoncms_history",
   "iot_class": "local_polling",
   "quality_scale": "legacy",
-  "requirements": ["pyemoncms==0.1.2"]
+  "requirements": ["pyemoncms==0.1.3"]
 }
homeassistant/components/geocaching/entity.py (39 lines, new file)

"""Sensor entities for Geocaching."""

from typing import cast

from geocachingapi.models import GeocachingCache

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import GeocachingDataUpdateCoordinator


# Base class for all platforms
class GeocachingBaseEntity(CoordinatorEntity[GeocachingDataUpdateCoordinator]):
    """Base class for Geocaching sensors."""

    _attr_has_entity_name = True


# Base class for cache entities
class GeocachingCacheEntity(GeocachingBaseEntity):
    """Base class for Geocaching cache entities."""

    def __init__(
        self, coordinator: GeocachingDataUpdateCoordinator, cache: GeocachingCache
    ) -> None:
        """Initialize the Geocaching cache entity."""
        super().__init__(coordinator)
        self.cache = cache

        # A device can have multiple entities, and for a cache which requires multiple entities we want to group them together.
        # Therefore, we create a device for each cache, which holds all related entities.
        self._attr_device_info = DeviceInfo(
            name=f"Geocache {cache.name}",
            identifiers={(DOMAIN, cast(str, cache.reference_code))},
            entry_type=DeviceEntryType.SERVICE,
            manufacturer=cache.owner.username,
        )
@@ -15,6 +15,24 @@
     },
     "awarded_favorite_points": {
       "default": "mdi:heart"
     },
+    "cache_name": {
+      "default": "mdi:label"
+    },
+    "cache_owner": {
+      "default": "mdi:account"
+    },
+    "cache_found_date": {
+      "default": "mdi:calendar-search"
+    },
+    "cache_found": {
+      "default": "mdi:package-variant-closed-check"
+    },
+    "cache_favorite_points": {
+      "default": "mdi:star-check"
+    },
+    "cache_hidden_date": {
+      "default": "mdi:calendar-badge"
+    }
   }
 }
@@ -4,18 +4,25 @@ from __future__ import annotations

 from collections.abc import Callable
 from dataclasses import dataclass
+import datetime
 from typing import cast

-from geocachingapi.models import GeocachingStatus
+from geocachingapi.models import GeocachingCache, GeocachingStatus

-from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
+from homeassistant.components.sensor import (
+    SensorDeviceClass,
+    SensorEntity,
+    SensorEntityDescription,
+)
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
+from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity
+from homeassistant.helpers.typing import StateType

 from .const import DOMAIN
 from .coordinator import GeocachingConfigEntry, GeocachingDataUpdateCoordinator
+from .entity import GeocachingBaseEntity, GeocachingCacheEntity


 @dataclass(frozen=True, kw_only=True)
@@ -25,43 +32,63 @@ class GeocachingSensorEntityDescription(SensorEntityDescription):
     value_fn: Callable[[GeocachingStatus], str | int | None]


-SENSORS: tuple[GeocachingSensorEntityDescription, ...] = (
+PROFILE_SENSORS: tuple[GeocachingSensorEntityDescription, ...] = (
     GeocachingSensorEntityDescription(
         key="find_count",
         translation_key="find_count",
+        native_unit_of_measurement="caches",
         value_fn=lambda status: status.user.find_count,
     ),
     GeocachingSensorEntityDescription(
         key="hide_count",
         translation_key="hide_count",
+        native_unit_of_measurement="caches",
         entity_registry_visible_default=False,
         value_fn=lambda status: status.user.hide_count,
     ),
     GeocachingSensorEntityDescription(
         key="favorite_points",
         translation_key="favorite_points",
+        native_unit_of_measurement="points",
         entity_registry_visible_default=False,
         value_fn=lambda status: status.user.favorite_points,
     ),
     GeocachingSensorEntityDescription(
         key="souvenir_count",
         translation_key="souvenir_count",
+        native_unit_of_measurement="souvenirs",
         value_fn=lambda status: status.user.souvenir_count,
     ),
     GeocachingSensorEntityDescription(
         key="awarded_favorite_points",
         translation_key="awarded_favorite_points",
+        native_unit_of_measurement="points",
         entity_registry_visible_default=False,
         value_fn=lambda status: status.user.awarded_favorite_points,
     ),
 )
+
+
+@dataclass(frozen=True, kw_only=True)
+class GeocachingCacheSensorDescription(SensorEntityDescription):
+    """Define Sensor entity description class."""
+
+    value_fn: Callable[[GeocachingCache], StateType | datetime.date]
+
+
+CACHE_SENSORS: tuple[GeocachingCacheSensorDescription, ...] = (
+    GeocachingCacheSensorDescription(
+        key="found_date",
+        device_class=SensorDeviceClass.DATE,
+        value_fn=lambda cache: cache.found_date_time,
+    ),
+    GeocachingCacheSensorDescription(
+        key="favorite_points",
+        value_fn=lambda cache: cache.favorite_points,
+    ),
+    GeocachingCacheSensorDescription(
+        key="hidden_date",
+        device_class=SensorDeviceClass.DATE,
+        value_fn=lambda cache: cache.hidden_date,
+    ),
+)


 async def async_setup_entry(
     hass: HomeAssistant,
     entry: GeocachingConfigEntry,
@@ -69,14 +96,68 @@ async def async_setup_entry(
 ) -> None:
     """Set up a Geocaching sensor entry."""
     coordinator = entry.runtime_data
-    async_add_entities(
-        GeocachingSensor(coordinator, description) for description in SENSORS
+
+    entities: list[Entity] = []
+
+    entities.extend(
+        GeocachingProfileSensor(coordinator, description)
+        for description in PROFILE_SENSORS
     )

+    status = coordinator.data

-class GeocachingSensor(
-    CoordinatorEntity[GeocachingDataUpdateCoordinator], SensorEntity
-):
+    # Add entities for tracked caches
+    entities.extend(
+        GeoEntityCacheSensorEntity(coordinator, cache, description)
+        for cache in status.tracked_caches
+        for description in CACHE_SENSORS
+    )
+
+    async_add_entities(entities)
+
+
+# Base class for a cache entity.
+# Sets the device, ID and translation settings to correctly group the entity to the correct cache device and give it the correct name.
+class GeoEntityBaseCache(GeocachingCacheEntity, SensorEntity):
+    """Base class for cache entities."""
+
+    def __init__(
+        self,
+        coordinator: GeocachingDataUpdateCoordinator,
+        cache: GeocachingCache,
+        key: str,
+    ) -> None:
+        """Initialize the Geocaching sensor."""
+        super().__init__(coordinator, cache)
+
+        self._attr_unique_id = f"{cache.reference_code}_{key}"
+
+        # The translation key determines the name of the entity as this is the lookup for the `strings.json` file.
+        self._attr_translation_key = f"cache_{key}"
+
+
+class GeoEntityCacheSensorEntity(GeoEntityBaseCache, SensorEntity):
+    """Representation of a cache sensor."""
+
+    entity_description: GeocachingCacheSensorDescription
+
+    def __init__(
+        self,
+        coordinator: GeocachingDataUpdateCoordinator,
+        cache: GeocachingCache,
+        description: GeocachingCacheSensorDescription,
+    ) -> None:
+        """Initialize the Geocaching sensor."""
+        super().__init__(coordinator, cache, description.key)
+        self.entity_description = description
+
+    @property
+    def native_value(self) -> StateType | datetime.date:
+        """Return the state of the sensor."""
+        return self.entity_description.value_fn(self.cache)
+
+
+class GeocachingProfileSensor(GeocachingBaseEntity, SensorEntity):
     """Representation of a Sensor."""

     entity_description: GeocachingSensorEntityDescription
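The cache sensors above follow the common entity-description pattern: each sensor is declared as a frozen dataclass whose `value_fn` extracts one field from the cache object. A condensed standalone sketch of that pattern follows; the `Cache` class here is a stand-in for geocachingapi's `GeocachingCache`, not the real model.

```python
from collections.abc import Callable
from dataclasses import dataclass
import datetime


@dataclass(frozen=True)
class Cache:
    """Stand-in for GeocachingCache."""

    reference_code: str
    favorite_points: int
    hidden_date: datetime.date


@dataclass(frozen=True, kw_only=True)
class CacheSensorDescription:
    key: str
    value_fn: Callable[[Cache], int | datetime.date]


SENSORS = (
    CacheSensorDescription(key="favorite_points", value_fn=lambda c: c.favorite_points),
    CacheSensorDescription(key="hidden_date", value_fn=lambda c: c.hidden_date),
)

cache = Cache("GC123", 7, datetime.date(2020, 5, 1))
for desc in SENSORS:
    # the unique ID mirrors f"{cache.reference_code}_{key}" from the diff
    print(f"{cache.reference_code}_{desc.key} -> {desc.value_fn(cache)}")
```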
@@ -33,11 +33,36 @@
   },
   "entity": {
     "sensor": {
-      "find_count": { "name": "Total finds" },
-      "hide_count": { "name": "Total hides" },
-      "favorite_points": { "name": "Favorite points" },
-      "souvenir_count": { "name": "Total souvenirs" },
-      "awarded_favorite_points": { "name": "Awarded favorite points" }
+      "find_count": {
+        "name": "Total finds",
+        "unit_of_measurement": "caches"
+      },
+      "hide_count": {
+        "name": "Total hides",
+        "unit_of_measurement": "caches"
+      },
+      "favorite_points": {
+        "name": "Favorite points",
+        "unit_of_measurement": "points"
+      },
+      "souvenir_count": {
+        "name": "Total souvenirs",
+        "unit_of_measurement": "souvenirs"
+      },
+      "awarded_favorite_points": {
+        "name": "Awarded favorite points",
+        "unit_of_measurement": "points"
+      },
+      "cache_found_date": {
+        "name": "Found date"
+      },
+      "cache_favorite_points": {
+        "name": "Favorite points",
+        "unit_of_measurement": "points"
+      },
+      "cache_hidden_date": {
+        "name": "Hidden date"
+      }
     }
   }
 }
@@ -112,11 +112,14 @@ PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_URL = "addon_url"
|
||||
PLACEHOLDER_KEY_REFERENCE = "reference"
|
||||
PLACEHOLDER_KEY_COMPONENTS = "components"
|
||||
PLACEHOLDER_KEY_FREE_SPACE = "free_space"
|
||||
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL = "issue_addon_boot_fail"
|
||||
ISSUE_KEY_SYSTEM_DOCKER_CONFIG = "issue_system_docker_config"
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING = "issue_addon_detached_addon_missing"
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED = "issue_addon_detached_addon_removed"
|
||||
ISSUE_KEY_ADDON_PWNED = "issue_addon_pwned"
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE = "issue_system_free_space"
|
||||
|
||||
CORE_CONTAINER = "homeassistant"
|
||||
SUPERVISOR_CONTAINER = "hassio_supervisor"
|
||||
@@ -137,6 +140,24 @@ KEY_TO_UPDATE_TYPES: dict[str, set[str]] = {
|
||||
|
||||
REQUEST_REFRESH_DELAY = 10
|
||||
|
||||
HELP_URLS = {
|
||||
"help_url": "https://www.home-assistant.io/help/",
|
||||
"community_url": "https://community.home-assistant.io/",
|
||||
}
|
||||
|
||||
EXTRA_PLACEHOLDERS = {
|
||||
"issue_mount_mount_failed": {
|
||||
"storage_url": "/config/storage",
|
||||
},
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: HELP_URLS,
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE: {
|
||||
"more_info_free_space": "https://www.home-assistant.io/more-info/free-space",
|
||||
},
|
||||
ISSUE_KEY_ADDON_PWNED: {
|
||||
"more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class SupervisorEntityModel(StrEnum):
|
||||
"""Supervisor entity model."""
|
||||
|
@@ -41,17 +41,21 @@ from .const import (
     EVENT_SUPERVISOR_EVENT,
     EVENT_SUPERVISOR_UPDATE,
     EVENT_SUPPORTED_CHANGED,
+    EXTRA_PLACEHOLDERS,
     ISSUE_KEY_ADDON_BOOT_FAIL,
     ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
     ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
+    ISSUE_KEY_ADDON_PWNED,
     ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
+    ISSUE_KEY_SYSTEM_FREE_SPACE,
     PLACEHOLDER_KEY_ADDON,
     PLACEHOLDER_KEY_ADDON_URL,
+    PLACEHOLDER_KEY_FREE_SPACE,
     PLACEHOLDER_KEY_REFERENCE,
     REQUEST_REFRESH_DELAY,
     UPDATE_KEY_SUPERVISOR,
 )
-from .coordinator import get_addons_info
+from .coordinator import get_addons_info, get_host_info
 from .handler import HassIO, get_supervisor_client

 ISSUE_KEY_UNHEALTHY = "unhealthy"
@@ -78,6 +82,8 @@ ISSUE_KEYS_FOR_REPAIRS = {
     ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
     ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
     "issue_system_disk_lifetime",
+    ISSUE_KEY_SYSTEM_FREE_SPACE,
+    ISSUE_KEY_ADDON_PWNED,
 }

 _LOGGER = logging.getLogger(__name__)
@@ -241,11 +247,17 @@ class SupervisorIssues:
     def add_issue(self, issue: Issue) -> None:
         """Add or update an issue in the list. Create or update a repair if necessary."""
         if issue.key in ISSUE_KEYS_FOR_REPAIRS:
-            placeholders: dict[str, str] | None = None
-            if issue.reference:
-                placeholders = {PLACEHOLDER_KEY_REFERENCE: issue.reference}
+            placeholders: dict[str, str] = {}
+            if not issue.suggestions and issue.key in EXTRA_PLACEHOLDERS:
+                placeholders |= EXTRA_PLACEHOLDERS[issue.key]

-            if issue.key == ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING:
+            if issue.reference:
+                placeholders[PLACEHOLDER_KEY_REFERENCE] = issue.reference
+
+            if issue.key in {
+                ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
+                ISSUE_KEY_ADDON_PWNED,
+            }:
                 placeholders[PLACEHOLDER_KEY_ADDON_URL] = (
                     f"/hassio/addon/{issue.reference}"
                 )
@@ -257,6 +269,19 @@ class SupervisorIssues:
                 else:
                     placeholders[PLACEHOLDER_KEY_ADDON] = issue.reference

+            elif issue.key == ISSUE_KEY_SYSTEM_FREE_SPACE:
+                host_info = get_host_info(self._hass)
+                if (
+                    host_info
+                    and "data" in host_info
+                    and "disk_free" in host_info["data"]
+                ):
+                    placeholders[PLACEHOLDER_KEY_FREE_SPACE] = str(
+                        host_info["data"]["disk_free"]
+                    )
+                else:
+                    placeholders[PLACEHOLDER_KEY_FREE_SPACE] = "<2"
+
             async_create_issue(
                 self._hass,
                 DOMAIN,
@@ -264,7 +289,7 @@ class SupervisorIssues:
                 is_fixable=bool(issue.suggestions),
                 severity=IssueSeverity.WARNING,
                 translation_key=issue.key,
-                translation_placeholders=placeholders,
+                translation_placeholders=placeholders or None,
             )

         self._issues[issue.uuid] = issue
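The rework above builds the repair's translation placeholders incrementally instead of all-or-nothing. A standalone sketch of that merging behavior follows, with a toy `Issue` type standing in for the Supervisor's and only a subset of `EXTRA_PLACEHOLDERS`; it is an illustration of the control flow, not the actual module.

```python
from dataclasses import dataclass, field


@dataclass
class Issue:
    """Toy stand-in for the Supervisor issue model."""

    key: str
    reference: str | None = None
    suggestions: list = field(default_factory=list)


EXTRA_PLACEHOLDERS = {
    "issue_addon_pwned": {
        "more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords",
    },
}


def build_placeholders(issue: Issue) -> dict[str, str] | None:
    placeholders: dict[str, str] = {}
    # per-key extras are merged first, but only for non-fixable issues
    if not issue.suggestions and issue.key in EXTRA_PLACEHOLDERS:
        placeholders |= EXTRA_PLACEHOLDERS[issue.key]
    if issue.reference:
        placeholders["reference"] = issue.reference
        placeholders["addon_url"] = f"/hassio/addon/{issue.reference}"
    return placeholders or None  # the repair API expects None when empty


print(build_placeholders(Issue("issue_addon_pwned", reference="core_ssh")))
```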
@@ -16,8 +16,10 @@ from homeassistant.data_entry_flow import FlowResult

 from . import get_addons_info, get_issues_info
 from .const import (
+    EXTRA_PLACEHOLDERS,
     ISSUE_KEY_ADDON_BOOT_FAIL,
     ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
+    ISSUE_KEY_ADDON_PWNED,
     ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
     PLACEHOLDER_KEY_ADDON,
     PLACEHOLDER_KEY_COMPONENTS,
@@ -26,11 +28,6 @@ from .const import (
 from .handler import get_supervisor_client
 from .issues import Issue, Suggestion

-HELP_URLS = {
-    "help_url": "https://www.home-assistant.io/help/",
-    "community_url": "https://community.home-assistant.io/",
-}
-
 SUGGESTION_CONFIRMATION_REQUIRED = {
     "addon_execute_remove",
     "system_adopt_data_disk",
@@ -38,14 +35,6 @@ SUGGESTION_CONFIRMATION_REQUIRED = {
 }


-EXTRA_PLACEHOLDERS = {
-    "issue_mount_mount_failed": {
-        "storage_url": "/config/storage",
-    },
-    ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: HELP_URLS,
-}
-
-
 class SupervisorIssueRepairFlow(RepairsFlow):
     """Handler for an issue fixing flow."""

@@ -219,6 +208,7 @@ async def async_create_fix_flow(
     if issue and issue.key in {
         ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
         ISSUE_KEY_ADDON_BOOT_FAIL,
+        ISSUE_KEY_ADDON_PWNED,
     }:
         return AddonIssueRepairFlow(hass, issue_id)
@@ -52,6 +52,10 @@
         }
       }
     },
+    "issue_addon_pwned": {
+      "title": "Insecure secrets detected in add-on configuration",
+      "description": "Add-on {addon} uses secrets/passwords in its configuration which are detected as not secure. See [pwned passwords and secrets]({more_info_pwned}) for more information on this issue."
+    },
     "issue_mount_mount_failed": {
       "title": "Network storage device failed",
       "fix_flow": {
@@ -119,6 +123,10 @@
       "title": "Disk lifetime exceeding 90%",
       "description": "The data disk has exceeded 90% of its expected lifespan. The disk may soon malfunction which can lead to data loss. You should replace it soon and migrate your data."
     },
+    "issue_system_free_space": {
+      "title": "Data disk is running low on free space",
+      "description": "The data disk has only {free_space}GB free space left. This may cause issues with system stability and interfere with functionality such as backups and updates. See [clear up storage]({more_info_free_space}) for tips on how to free up space."
+    },
     "unhealthy": {
       "title": "Unhealthy system - {reason}",
       "description": "System is currently unhealthy due to {reason}. For troubleshooting information, select Learn more."
@@ -36,6 +36,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.util.dt import utcnow
+from homeassistant.util.variance import ignore_variance

 from .const import DOMAIN
 from .coordinator import HomeWizardConfigEntry, HWEnergyDeviceUpdateCoordinator
@@ -66,15 +67,13 @@ def to_percentage(value: float | None) -> float | None:
     return value * 100 if value is not None else None


-def time_to_datetime(value: int | None) -> datetime | None:
-    """Convert seconds to datetime when value is not None."""
-    return (
-        utcnow().replace(microsecond=0) - timedelta(seconds=value)
-        if value is not None
-        else None
-    )
+def uptime_to_datetime(value: int) -> datetime:
+    """Convert seconds to datetime timestamp."""
+    return utcnow().replace(microsecond=0) - timedelta(seconds=value)
+
+
+uptime_to_stable_datetime = ignore_variance(uptime_to_datetime, timedelta(minutes=5))

 SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
     HomeWizardSensorEntityDescription(
         key="smr_version",
@@ -647,7 +646,11 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
             lambda data: data.system is not None and data.system.uptime_s is not None
         ),
         value_fn=(
-            lambda data: time_to_datetime(data.system.uptime_s) if data.system else None
+            lambda data: (
+                uptime_to_stable_datetime(data.system.uptime_s)
+                if data.system is not None and data.system.uptime_s is not None
+                else None
+            )
         ),
     ),
 )
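The point of wrapping `uptime_to_datetime` in `ignore_variance` is that an uptime-derived "started at" timestamp would otherwise drift by a few seconds on every poll. Below is a simplified standalone sketch of what such a wrapper does; Home Assistant's real helper lives in `homeassistant.util.variance`, while this toy version only caches a single value and exists purely for illustration.

```python
from collections.abc import Callable
from datetime import datetime, timedelta, timezone


def ignore_variance(
    func: Callable[[int], datetime], tolerance: timedelta
) -> Callable[[int], datetime]:
    """Return func's previous result while new results stay within tolerance."""
    last: datetime | None = None

    def wrapper(value: int) -> datetime:
        nonlocal last
        new = func(value)
        if last is None or abs(new - last) > tolerance:
            last = new  # only accept changes larger than the tolerance
        return last

    return wrapper


def uptime_to_datetime(uptime_s: int) -> datetime:
    return datetime.now(timezone.utc).replace(microsecond=0) - timedelta(
        seconds=uptime_s
    )


stable = ignore_variance(uptime_to_datetime, timedelta(minutes=5))
print(stable(3600))  # first call sets the baseline
print(stable(3602))  # 2 s of polling jitter -> same timestamp returned
```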
@@ -18,6 +18,6 @@
   },
   "iot_class": "local_polling",
   "loggers": ["aiopvapi"],
-  "requirements": ["aiopvapi==3.1.1"],
+  "requirements": ["aiopvapi==3.2.1"],
   "zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/imgw_pib",
   "iot_class": "cloud_polling",
   "quality_scale": "silver",
-  "requirements": ["imgw_pib==1.5.4"]
+  "requirements": ["imgw_pib==1.5.6"]
 }
@@ -42,7 +42,7 @@
       "characteristic_missing": "The device is either already connected to Wi-Fi, or no longer able to connect to Wi-Fi. If you want to connect it to another network, try factory resetting it first.",
       "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
       "provision_successful": "The device has successfully connected to the Wi-Fi network.",
-      "provision_successful_url": "The device has successfully connected to the Wi-Fi network.\n\nPlease visit {url} to finish setup.",
+      "provision_successful_url": "The device has successfully connected to the Wi-Fi network.\n\nPlease finish the setup by following the [setup instructions]({url}).",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     }
   }
@@ -2,7 +2,9 @@

 import asyncio
 import logging
+import uuid

+from aiohttp import ClientSession
 from packaging import version
 from pylamarzocco import (
     LaMarzoccoBluetoothClient,
@@ -11,6 +13,7 @@ from pylamarzocco import (
 )
 from pylamarzocco.const import FirmwareType
 from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
+from pylamarzocco.util import InstallationKey, generate_installation_key

 from homeassistant.components.bluetooth import async_discovered_service_info
 from homeassistant.const import (
@@ -19,13 +22,14 @@ from homeassistant.const import (
     CONF_TOKEN,
     CONF_USERNAME,
     Platform,
+    __version__,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

-from .const import CONF_USE_BLUETOOTH, DOMAIN
+from .const import CONF_INSTALLATION_KEY, CONF_USE_BLUETOOTH, DOMAIN
 from .coordinator import (
     LaMarzoccoConfigEntry,
     LaMarzoccoConfigUpdateCoordinator,
@@ -60,7 +64,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
     cloud_client = LaMarzoccoCloudClient(
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
-        client=async_create_clientsession(hass),
+        installation_key=InstallationKey.from_json(entry.data[CONF_INSTALLATION_KEY]),
+        client=create_client_session(hass),
     )

     try:
@@ -166,45 +171,50 @@ async def async_migrate_entry(
     hass: HomeAssistant, entry: LaMarzoccoConfigEntry
 ) -> bool:
     """Migrate config entry."""
-    if entry.version > 3:
+    if entry.version > 4:
         # guard against downgrade from a future version
         return False

-    if entry.version == 1:
+    if entry.version in (1, 2):
         _LOGGER.error(
-            "Migration from version 1 is no longer supported, please remove and re-add the integration"
+            "Migration from version 1 or 2 is no longer supported, please remove and re-add the integration"
         )
         return False

-    if entry.version == 2:
+    if entry.version == 3:
+        installation_key = generate_installation_key(str(uuid.uuid4()).lower())
         cloud_client = LaMarzoccoCloudClient(
             username=entry.data[CONF_USERNAME],
             password=entry.data[CONF_PASSWORD],
+            installation_key=installation_key,
+            client=create_client_session(hass),
         )
         try:
-            things = await cloud_client.list_things()
+            await cloud_client.async_register_client()
         except (AuthFail, RequestNotSuccessful) as exc:
             _LOGGER.error("Migration failed with error %s", exc)
             return False
-        v3_data = {
-            CONF_USERNAME: entry.data[CONF_USERNAME],
-            CONF_PASSWORD: entry.data[CONF_PASSWORD],
-            CONF_TOKEN: next(
-                (
-                    thing.ble_auth_token
-                    for thing in things
-                    if thing.serial_number == entry.unique_id
-                ),
-                None,
-            ),
-        }
-        if CONF_MAC in entry.data:
-            v3_data[CONF_MAC] = entry.data[CONF_MAC]
-
         hass.config_entries.async_update_entry(
             entry,
-            data=v3_data,
-            version=3,
+            data={
+                **entry.data,
+                CONF_INSTALLATION_KEY: installation_key.to_json(),
+            },
+            version=4,
         )
-        _LOGGER.debug("Migrated La Marzocco config entry to version 2")
+        _LOGGER.debug("Migrated La Marzocco config entry to version 4")

     return True
+
+
+def create_client_session(hass: HomeAssistant) -> ClientSession:
+    """Create a ClientSession with La Marzocco specific headers."""
+
+    return async_create_clientsession(
+        hass,
+        headers={
+            "X-Client": "HOME_ASSISTANT",
+            "X-Client-Build": __version__,
+        },
+    )
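The migration above follows a standard stepwise pattern: refuse entries from a newer (future) schema, refuse versions too old to upgrade, and migrate everything else in place. A generic standalone sketch of that pattern follows; `Entry` is a toy stand-in for Home Assistant's `ConfigEntry`, and the `installation_key` value is hypothetical.

```python
from dataclasses import dataclass, field
from typing import Any

CURRENT_VERSION = 4


@dataclass
class Entry:
    """Toy stand-in for ConfigEntry."""

    version: int
    data: dict[str, Any] = field(default_factory=dict)


def migrate(entry: Entry) -> bool:
    if entry.version > CURRENT_VERSION:
        return False  # guard against downgrade from a future version
    if entry.version in (1, 2):
        return False  # too old to migrate: remove and re-add instead
    if entry.version == 3:
        # hypothetical key material added by the v3 -> v4 step
        entry.data["installation_key"] = "generated-key"
        entry.version = CURRENT_VERSION
    return True


assert migrate(Entry(3)) and migrate(Entry(4))
assert not migrate(Entry(5)) and not migrate(Entry(1))
```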
@@ -5,11 +5,13 @@ from __future__ import annotations
 from collections.abc import Mapping
 import logging
 from typing import Any
+import uuid

+from aiohttp import ClientSession
 from pylamarzocco import LaMarzoccoCloudClient
 from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
 from pylamarzocco.models import Thing
+from pylamarzocco.util import InstallationKey, generate_installation_key
 import voluptuous as vol

 from homeassistant.components.bluetooth import (
@@ -33,7 +35,6 @@ from homeassistant.const import (
 )
 from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.aiohttp_client import async_create_clientsession
 from homeassistant.helpers.selector import (
     SelectOptionDict,
     SelectSelector,
@@ -45,7 +46,8 @@ from homeassistant.helpers.selector import (
 )
 from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo

-from .const import CONF_USE_BLUETOOTH, DOMAIN
+from . import create_client_session
+from .const import CONF_INSTALLATION_KEY, CONF_USE_BLUETOOTH, DOMAIN
 from .coordinator import LaMarzoccoConfigEntry

 CONF_MACHINE = "machine"
@@ -57,9 +59,10 @@ _LOGGER = logging.getLogger(__name__)
 class LmConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for La Marzocco."""

-    VERSION = 3
+    VERSION = 4

     _client: ClientSession
+    _installation_key: InstallationKey

     def __init__(self) -> None:
         """Initialize the config flow."""
@@ -83,13 +86,18 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
             **user_input,
         }

-        self._client = async_create_clientsession(self.hass)
+        self._client = create_client_session(self.hass)
+        self._installation_key = generate_installation_key(
+            str(uuid.uuid4()).lower()
+        )
         cloud_client = LaMarzoccoCloudClient(
             username=data[CONF_USERNAME],
             password=data[CONF_PASSWORD],
             client=self._client,
+            installation_key=self._installation_key,
         )
         try:
+            await cloud_client.async_register_client()
             things = await cloud_client.list_things()
         except AuthFail:
             _LOGGER.debug("Server rejected login credentials")
@@ -184,6 +192,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
             title=selected_device.name,
             data={
                 **self._config,
+                CONF_INSTALLATION_KEY: self._installation_key.to_json(),
                 CONF_TOKEN: self._things[serial_number].ble_auth_token,
             },
         )
@@ -5,3 +5,4 @@ from typing import Final
 DOMAIN: Final = "lamarzocco"

 CONF_USE_BLUETOOTH: Final = "use_bluetooth"
+CONF_INSTALLATION_KEY: Final = "installation_key"
@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.0.11"]
+  "requirements": ["pylamarzocco==2.1.0"]
 }
@@ -8,7 +8,7 @@ import logging
 import ssl
 from typing import Any, cast

-from pylutron_caseta import BUTTON_STATUS_PRESSED
+from pylutron_caseta import BUTTON_STATUS_MULTITAP, BUTTON_STATUS_PRESSED
 from pylutron_caseta.smartbridge import Smartbridge
 import voluptuous as vol

@@ -25,6 +25,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.typing import ConfigType

 from .const import (
+    ACTION_MULTITAP,
     ACTION_PRESS,
     ACTION_RELEASE,
     ATTR_ACTION,
@@ -448,6 +449,8 @@ def _async_subscribe_keypad_events(

     if event_type == BUTTON_STATUS_PRESSED:
         action = ACTION_PRESS
+    elif event_type == BUTTON_STATUS_MULTITAP:
+        action = ACTION_MULTITAP
     else:
         action = ACTION_RELEASE
@@ -29,6 +29,7 @@ ATTR_DEVICE_NAME = "device_name"
|
||||
ATTR_AREA_NAME = "area_name"
|
||||
ATTR_ACTION = "action"
|
||||
|
||||
ACTION_MULTITAP = "multi_tap"
|
||||
ACTION_PRESS = "press"
|
||||
ACTION_RELEASE = "release"
|
||||
|
||||
|
@@ -21,6 +21,7 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
 from homeassistant.helpers.typing import ConfigType

 from .const import (
+    ACTION_MULTITAP,
     ACTION_PRESS,
     ACTION_RELEASE,
     ATTR_ACTION,
@@ -39,7 +40,7 @@ def _reverse_dict(forward_dict: dict) -> dict:
     return {v: k for k, v in forward_dict.items()}


-SUPPORTED_INPUTS_EVENTS_TYPES = [ACTION_PRESS, ACTION_RELEASE]
+SUPPORTED_INPUTS_EVENTS_TYPES = [ACTION_PRESS, ACTION_MULTITAP, ACTION_RELEASE]

 LUTRON_BUTTON_TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
     {
@@ -9,7 +9,7 @@
   },
   "iot_class": "local_push",
   "loggers": ["pylutron_caseta"],
-  "requirements": ["pylutron-caseta==0.24.0"],
+  "requirements": ["pylutron-caseta==0.25.0"],
   "zeroconf": [
     {
       "type": "_lutron._tcp.local.",
@@ -15,8 +15,3 @@ ID_TYPE_DEVICE_ID = "deviceid"
|
||||
ID_TYPE_SERIAL = "serial"
|
||||
|
||||
FEATUREMAP_ATTRIBUTE_ID = 65532
|
||||
|
||||
# vacuum entity service actions
|
||||
SERVICE_GET_AREAS = "get_areas" # get SupportedAreas and SupportedMaps
|
||||
SERVICE_SELECT_AREAS = "select_areas" # call SelectAreas Matter command
|
||||
SERVICE_CLEAN_AREAS = "clean_areas" # call SelectAreas Matter command and start RVC
|
||||
|
@@ -150,16 +150,5 @@
         "default": "mdi:ev-station"
       }
     }
-  },
-  "services": {
-    "clean_areas": {
-      "service": "mdi:robot-vacuum"
-    },
-    "get_areas": {
-      "service": "mdi:map"
-    },
-    "select_areas": {
-      "service": "mdi:map"
-    }
+  }
 }
@@ -1,24 +0,0 @@
-# Service descriptions for Matter integration
-
-get_areas:
-  target:
-    entity:
-      domain: vacuum
-
-select_areas:
-  target:
-    entity:
-      domain: vacuum
-  fields:
-    areas:
-      required: true
-      example: [1, 3]
-
-clean_areas:
-  target:
-    entity:
-      domain: vacuum
-  fields:
-    areas:
-      required: true
-      example: [1, 3]
@@ -548,30 +548,6 @@
           "description": "The Matter device to add to the other Matter network."
         }
       }
-    },
-    "get_areas": {
-      "name": "Get areas",
-      "description": "Returns a list of available areas and maps for robot vacuum cleaners."
-    },
-    "select_areas": {
-      "name": "Select areas",
-      "description": "Selects the specified areas for cleaning. The areas must be specified as a list of area IDs.",
-      "fields": {
-        "areas": {
-          "name": "Areas",
-          "description": "A list of area IDs to select."
-        }
-      }
-    },
-    "clean_areas": {
-      "name": "Clean areas",
-      "description": "Instructs the Matter vacuum cleaner to clean the specified areas.",
-      "fields": {
-        "areas": {
-          "name": "Areas",
-          "description": "A list of area IDs to clean."
-        }
-      }
-    }
+    }
   }
 }
@@ -3,12 +3,10 @@
 from __future__ import annotations

 from enum import IntEnum
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any

 from chip.clusters import Objects as clusters
-from chip.clusters.Objects import NullValue
 from matter_server.client.models import device_types
-import voluptuous as vol

 from homeassistant.components.vacuum import (
     StateVacuumEntity,
@@ -18,25 +16,14 @@ from homeassistant.components.vacuum import (
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
-from homeassistant.core import (
-    HomeAssistant,
-    ServiceResponse,
-    SupportsResponse,
-    callback,
-)
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

-from .const import SERVICE_CLEAN_AREAS, SERVICE_GET_AREAS, SERVICE_SELECT_AREAS
 from .entity import MatterEntity
 from .helpers import get_matter
 from .models import MatterDiscoverySchema

-ATTR_CURRENT_AREA = "current_area"
-ATTR_CURRENT_AREA_NAME = "current_area_name"
-ATTR_SELECTED_AREAS = "selected_areas"
-

 class OperationalState(IntEnum):
     """Operational State of the vacuum cleaner.
@@ -69,33 +56,6 @@ async def async_setup_entry(
     """Set up Matter vacuum platform from Config Entry."""
     matter = get_matter(hass)
     matter.register_platform_handler(Platform.VACUUM, async_add_entities)
-    platform = entity_platform.async_get_current_platform()
-
-    # This will call Entity.async_handle_get_areas
-    platform.async_register_entity_service(
-        SERVICE_GET_AREAS,
-        schema=None,
-        func="async_handle_get_areas",
-        supports_response=SupportsResponse.ONLY,
-    )
-    # This will call Entity.async_handle_clean_areas
-    platform.async_register_entity_service(
-        SERVICE_CLEAN_AREAS,
-        schema={
-            vol.Required("areas"): vol.All(cv.ensure_list, [cv.positive_int]),
-        },
-        func="async_handle_clean_areas",
-        supports_response=SupportsResponse.ONLY,
-    )
-    # This will call Entity.async_handle_select_areas
-    platform.async_register_entity_service(
-        SERVICE_SELECT_AREAS,
-        schema={
-            vol.Required("areas"): vol.All(cv.ensure_list, [cv.positive_int]),
-        },
-        func="async_handle_select_areas",
-        supports_response=SupportsResponse.ONLY,
-    )


 class MatterVacuum(MatterEntity, StateVacuumEntity):
@@ -105,23 +65,9 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
     _supported_run_modes: (
         dict[int, clusters.RvcRunMode.Structs.ModeOptionStruct] | None
     ) = None
-    _attr_matter_areas: dict[str, Any] | None = None
-    _attr_current_area: int | None = None
-    _attr_current_area_name: str | None = None
-    _attr_selected_areas: list[int] | None = None
-    _attr_supported_maps: list[dict[str, Any]] | None = None
     entity_description: StateVacuumEntityDescription
     _platform_translation_key = "vacuum"

-    @property
-    def extra_state_attributes(self) -> dict[str, Any] | None:
-        """Return the state attributes of the entity."""
-        return {
-            ATTR_CURRENT_AREA: self._attr_current_area,
-            ATTR_CURRENT_AREA_NAME: self._attr_current_area_name,
-            ATTR_SELECTED_AREAS: self._attr_selected_areas,
-        }
-
     def _get_run_mode_by_tag(
         self, tag: ModeTag
     ) -> clusters.RvcRunMode.Structs.ModeOptionStruct | None:
@@ -190,160 +136,10 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
         """Pause the cleaning task."""
         await self.send_device_command(clusters.RvcOperationalState.Commands.Pause())

-    def async_get_areas(self, **kwargs: Any) -> dict[str, Any]:
-        """Get available area and map IDs from vacuum appliance."""
-
-        supported_areas = self.get_matter_attribute_value(
-            clusters.ServiceArea.Attributes.SupportedAreas
-        )
-        if not supported_areas:
-            raise HomeAssistantError("Can't get areas from the device.")
-
-        # Group by area_id: {area_id: {"map_id": ..., "name": ...}}
-        areas = {}
-        for area in supported_areas:
-            area_id = getattr(area, "areaID", None)
-            map_id = getattr(area, "mapID", None)
-            location_name = None
-            area_info = getattr(area, "areaInfo", None)
-            if area_info is not None:
-                location_info = getattr(area_info, "locationInfo", None)
-                if location_info is not None:
-                    location_name = getattr(location_info, "locationName", None)
-            if area_id is not None:
-                areas[area_id] = {"map_id": map_id, "name": location_name}
-
-        # Optionally, also extract supported maps if available
-        supported_maps = self.get_matter_attribute_value(
-            clusters.ServiceArea.Attributes.SupportedMaps
-        )
-        maps = []
-        if supported_maps:
-            maps = [
-                {
-                    "map_id": getattr(m, "mapID", None),
-                    "name": getattr(m, "name", None),
-                }
-                for m in supported_maps
-            ]
-
-        return {
-            "areas": areas,
-            "maps": maps,
-        }
-
-    async def async_handle_get_areas(self, **kwargs: Any) -> ServiceResponse:
-        """Get available area and map IDs from vacuum appliance."""
-        # Group by area_id: {area_id: {"map_id": ..., "name": ...}}
-        areas = {}
-        if self._attr_matter_areas is not None:
-            for area in self._attr_matter_areas:
-                area_id = getattr(area, "areaID", None)
-                map_id = getattr(area, "mapID", None)
-                location_name = None
-                area_info = getattr(area, "areaInfo", None)
-                if area_info is not None:
-                    location_info = getattr(area_info, "locationInfo", None)
-                    if location_info is not None:
-                        location_name = getattr(location_info, "locationName", None)
-                if area_id is not None:
-                    if map_id is NullValue:
-                        areas[area_id] = {"name": location_name}
-                    else:
-                        areas[area_id] = {"map_id": map_id, "name": location_name}
-
-        # Optionally, also extract supported maps if available
-        supported_maps = self.get_matter_attribute_value(
-            clusters.ServiceArea.Attributes.SupportedMaps
-        )
-        maps = []
-        if supported_maps != NullValue:  # chip.clusters.Types.Nullable
-            maps = [
-                {
-                    "map_id": getattr(m, "mapID", None)
-                    if getattr(m, "mapID", None) != NullValue
-                    else None,
-                    "name": getattr(m, "name", None),
-                }
-                for m in supported_maps
-            ]
-
-            return cast(
-                ServiceResponse,
-                {
-                    "areas": areas,
-                    "maps": maps,
-                },
-            )
-        return None
-
-    async def async_handle_select_areas(
-        self, areas: list[int], **kwargs: Any
-    ) -> ServiceResponse:
-        """Select areas to clean."""
-        selected_areas = areas
-        # Matter command to the vacuum cleaner to select the areas.
-        await self.send_device_command(
-            clusters.ServiceArea.Commands.SelectAreas(newAreas=selected_areas)
-        )
-        # Return response indicating selected areas.
-        return cast(
-            ServiceResponse, {"status": "areas selected", "areas": selected_areas}
-        )
-
-    async def async_handle_clean_areas(
-        self, areas: list[int], **kwargs: Any
-    ) -> ServiceResponse:
-        """Start cleaning the specified areas."""
-        # Matter command to the vacuum cleaner to select the areas.
-        await self.send_device_command(
-            clusters.ServiceArea.Commands.SelectAreas(newAreas=areas)
-        )
-        # Start the vacuum cleaner after selecting areas.
-        await self.async_start()
-        # Return response indicating selected areas.
-        return cast(
-            ServiceResponse, {"status": "cleaning areas selected", "areas": areas}
-        )
-
     @callback
     def _update_from_device(self) -> None:
         """Update from device."""
         self._calculate_features()
-        # ServiceArea: get areas from the device
-        self._attr_matter_areas = self.get_matter_attribute_value(
-            clusters.ServiceArea.Attributes.SupportedAreas
-        )
-        # optional CurrentArea attribute
-        # pylint: disable=too-many-nested-blocks
-        if self.get_matter_attribute_value(clusters.ServiceArea.Attributes.CurrentArea):
-            current_area = self.get_matter_attribute_value(
-                clusters.ServiceArea.Attributes.CurrentArea
-            )
-            # get areaInfo.locationInfo.locationName for current_area in SupportedAreas list
-            area_name = None
-            if self._attr_matter_areas:
-                for area in self._attr_matter_areas:
-                    if getattr(area, "areaID", None) == current_area:
-                        area_info = getattr(area, "areaInfo", None)
-                        if area_info is not None:
-                            location_info = getattr(area_info, "locationInfo", None)
-                            if location_info is not None:
-                                area_name = getattr(location_info, "locationName", None)
-                        break
-            self._attr_current_area = current_area
-            self._attr_current_area_name = area_name
-        else:
-            self._attr_current_area = None
-            self._attr_current_area_name = None
-
-        # optional SelectedAreas attribute
-        if self.get_matter_attribute_value(
-            clusters.ServiceArea.Attributes.SelectedAreas
-        ):
-            self._attr_selected_areas = self.get_matter_attribute_value(
-                clusters.ServiceArea.Attributes.SelectedAreas
-            )
         # derive state from the run mode + operational state
         run_mode_raw: int = self.get_matter_attribute_value(
             clusters.RvcRunMode.Attributes.CurrentMode
@@ -424,10 +220,6 @@ DISCOVERY_SCHEMAS = [
             clusters.RvcRunMode.Attributes.CurrentMode,
             clusters.RvcOperationalState.Attributes.OperationalState,
         ),
-        optional_attributes=(
-            clusters.ServiceArea.Attributes.SelectedAreas,
-            clusters.ServiceArea.Attributes.CurrentArea,
-        ),
         device_type=(device_types.RoboticVacuumCleaner,),
         allow_none_value=True,
     ),
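The removed area-name lookup walked `area.areaInfo.locationInfo.locationName` through chained `getattr` checks. Below is a compact standalone sketch of that traversal, using `SimpleNamespace` placeholders instead of real Matter cluster structs; `landmarkInfo` is a deliberately missing attribute to show the fallback.

```python
from types import SimpleNamespace
from typing import Any


def nested_attr(obj: Any, *names: str) -> Any:
    """Follow a chain of attributes, returning None at the first gap."""
    for name in names:
        obj = getattr(obj, name, None)
        if obj is None:
            return None
    return obj


area = SimpleNamespace(
    areaID=7,
    areaInfo=SimpleNamespace(
        locationInfo=SimpleNamespace(locationName="Kitchen")
    ),
)
print(nested_attr(area, "areaInfo", "locationInfo", "locationName"))  # Kitchen
print(nested_attr(area, "areaInfo", "landmarkInfo", "name"))  # None
```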
@@ -73,7 +73,6 @@ ABBREVIATIONS = {
     "fan_mode_stat_t": "fan_mode_state_topic",
     "frc_upd": "force_update",
     "g_tpl": "green_template",
-    "grp": "group",
     "hs_cmd_t": "hs_command_topic",
     "hs_cmd_tpl": "hs_command_template",
     "hs_stat_t": "hs_state_topic",
@@ -10,7 +10,6 @@ from homeassistant.helpers import config_validation as cv
 from .const import (
     CONF_COMMAND_TOPIC,
     CONF_ENCODING,
-    CONF_GROUP,
     CONF_QOS,
     CONF_RETAIN,
     CONF_STATE_TOPIC,
@@ -24,7 +23,6 @@ from .util import valid_publish_topic, valid_qos_schema, valid_subscribe_topic
 SCHEMA_BASE = {
     vol.Optional(CONF_QOS, default=DEFAULT_QOS): valid_qos_schema,
     vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
-    vol.Optional(CONF_GROUP): vol.All(cv.ensure_list, [cv.string]),
 }

 MQTT_BASE_SCHEMA = vol.Schema(SCHEMA_BASE)
@@ -106,7 +106,6 @@ CONF_FLASH_TIME_SHORT = "flash_time_short"
|
||||
CONF_GET_POSITION_TEMPLATE = "position_template"
|
||||
CONF_GET_POSITION_TOPIC = "position_topic"
|
||||
CONF_GREEN_TEMPLATE = "green_template"
|
||||
CONF_GROUP = "group"
|
||||
CONF_HS_COMMAND_TEMPLATE = "hs_command_template"
|
||||
CONF_HS_COMMAND_TOPIC = "hs_command_topic"
|
||||
CONF_HS_STATE_TOPIC = "hs_state_topic"
|
||||
|
@@ -13,7 +13,6 @@ import voluptuous as vol
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_CONFIGURATION_URL,
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_HW_VERSION,
|
||||
ATTR_MANUFACTURER,
|
||||
ATTR_MODEL,

@@ -33,13 +32,7 @@ from homeassistant.const import (
CONF_URL,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import (
CALLBACK_TYPE,
Event,
HassJobType,
HomeAssistant,
callback,
)
from homeassistant.core import Event, HassJobType, HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.device_registry import (
DeviceEntry,

@@ -86,7 +79,6 @@ from .const import (
CONF_ENABLED_BY_DEFAULT,
CONF_ENCODING,
CONF_ENTITY_PICTURE,
CONF_GROUP,
CONF_HW_VERSION,
CONF_IDENTIFIERS,
CONF_JSON_ATTRS_TEMPLATE,

@@ -144,7 +136,6 @@ MQTT_ATTRIBUTES_BLOCKED = {
"device_class",
"device_info",
"entity_category",
"entity_id",
"entity_picture",
"entity_registry_enabled_default",
"extra_state_attributes",

@@ -476,74 +467,19 @@ class MqttAttributesMixin(Entity):

_attributes_extra_blocked: frozenset[str] = frozenset()
_attr_tpl: Callable[[ReceivePayloadType], ReceivePayloadType] | None = None
_default_group_icon: str | None = None
_group_entity_ids: list[str] | None = None
_message_callback: Callable[
[MessageCallbackType, set[str] | None, ReceiveMessage], None
]
_process_update_extra_state_attributes: Callable[[dict[str, Any]], None]
_monitor_member_updates_callback: CALLBACK_TYPE

def __init__(self, config: ConfigType) -> None:
"""Initialize the JSON attributes mixin."""
self._attributes_sub_state: dict[str, EntitySubscription] = {}
self._attributes_config = config

def _monitor_member_updates(self) -> None:
"""Update the group members if the entity registry is updated."""
entity_registry = er.async_get(self.hass)

async def _handle_entity_registry_updated(event: Event[Any]) -> None:
"""Handle registry update event."""
if (
event.data["action"] in {"create", "update"}
and (entry := entity_registry.async_get(event.data["entity_id"]))
and entry.unique_id in self._attributes_config[CONF_GROUP]
) or (
event.data["action"] == "remove"
and self._group_entity_ids is not None
and event.data["entity_id"] in self._group_entity_ids
):
self._update_group_entity_ids()
self._attr_extra_state_attributes[ATTR_ENTITY_ID] = (
self._group_entity_ids
)
self.async_write_ha_state()

self.async_on_remove(
self.hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
_handle_entity_registry_updated,
)
)

def _update_group_entity_ids(self) -> None:
"""Set the entity_id property if the entity represents a group of entities.

Setting entity_id in the extra state attributes will show the discovered entity
as a group and will show the member entities in the UI.
"""
if CONF_GROUP not in self._attributes_config:
self._default_entity_icon = None
return
self._attr_icon = self._attr_icon or self._default_group_icon
entity_registry = er.async_get(self.hass)

self._group_entity_ids = []
for resource_id in self._attributes_config[CONF_GROUP]:
if entity_id := entity_registry.async_get_entity_id(
self.entity_id.split(".")[0], DOMAIN, resource_id
):
self._group_entity_ids.append(entity_id)

async def async_added_to_hass(self) -> None:
"""Subscribe MQTT events."""
await super().async_added_to_hass()
self._update_group_entity_ids()
if self._group_entity_ids is not None:
self._monitor_member_updates()
self._attr_extra_state_attributes = {ATTR_ENTITY_ID: self._group_entity_ids}

self._attributes_prepare_subscribe_topics()
self._attributes_subscribe_topics()

@@ -610,14 +546,12 @@ class MqttAttributesMixin(Entity):
_LOGGER.warning("Erroneous JSON: %s", payload)
else:
if isinstance(json_dict, dict):
filtered_dict: dict[str, Any] = {
filtered_dict = {
k: v
for k, v in json_dict.items()
if k not in MQTT_ATTRIBUTES_BLOCKED
and k not in self._attributes_extra_blocked
}
if self._group_entity_ids is not None:
filtered_dict[ATTR_ENTITY_ID] = self._group_entity_ids
if hasattr(self, "_process_update_extra_state_attributes"):
self._process_update_extra_state_attributes(filtered_dict)
else:
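The hunk above hinges on one transformation: incoming JSON attributes are filtered against a blocklist, and for group entities the member entity_ids are pinned afterwards so an MQTT payload cannot override them. A minimal runnable sketch of that step, with illustrative constants (the real blocklists live in the MQTT integration):

import json

MQTT_ATTRIBUTES_BLOCKED = {"entity_id", "device_class", "icon"}
EXTRA_BLOCKED = {"brightness"}  # stand-in for a platform's extra blocked keys


def filter_attributes(
    payload: str, group_entity_ids: list[str] | None
) -> dict[str, object]:
    """Drop blocked keys, then pin entity_id for group entities."""
    json_dict = json.loads(payload)
    filtered = {
        k: v
        for k, v in json_dict.items()
        if k not in MQTT_ATTRIBUTES_BLOCKED and k not in EXTRA_BLOCKED
    }
    if group_entity_ids is not None:
        filtered["entity_id"] = group_entity_ids
    return filtered


payload = '{"linkquality": 87, "entity_id": "spoofed", "brightness": 255}'
print(filter_attributes(payload, ["light.kitchen", "light.hall"]))
# {'linkquality': 87, 'entity_id': ['light.kitchen', 'light.hall']}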
@@ -239,7 +239,6 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity):
"""Representation of a MQTT light."""

_default_name = DEFAULT_NAME
_default_group_icon = "mdi:lightbulb-group"
_entity_id_format = ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED
_topic: dict[str, str | None]

@@ -164,7 +164,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
"""Representation of a MQTT JSON light."""

_default_name = DEFAULT_NAME
_default_group_icon = "mdi:lightbulb-group"
_entity_id_format = ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED

@@ -121,7 +121,6 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity):
"""Representation of a MQTT Template light."""

_default_name = DEFAULT_NAME
_default_group_icon = "mdi:lightbulb-group"
_entity_id_format = ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED
_optimistic: bool

@@ -40,7 +40,7 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN):
port=user_input[CONF_PORT],
session=session,
) as client:
await client.smartmeter()
await client.settings()
except P1MonitorError:
errors["base"] = "cannot_connect"
else:
3
homeassistant/components/prowl/const.py
Normal file
@@ -0,0 +1,3 @@
"""Constants for the Prowl Notification service."""

DOMAIN = "prowl"
@@ -3,6 +3,9 @@
"name": "Prowl",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/prowl",
"integration_type": "service",
"iot_class": "cloud_push",
"quality_scale": "legacy"
"loggers": ["prowl"],
"quality_scale": "legacy",
"requirements": ["prowlpy==1.0.2"]
}
@@ -3,9 +3,11 @@
from __future__ import annotations

import asyncio
from http import HTTPStatus
from functools import partial
import logging
from typing import Any

import prowlpy
import voluptuous as vol

from homeassistant.components.notify import (
@@ -17,12 +19,11 @@ from homeassistant.components.notify import (
)
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://api.prowlapp.com/publicapi/"

PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend({vol.Required(CONF_API_KEY): cv.string})

@@ -33,46 +34,49 @@ async def async_get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> ProwlNotificationService:
"""Get the Prowl notification service."""
return ProwlNotificationService(hass, config[CONF_API_KEY])
prowl = await hass.async_add_executor_job(
partial(prowlpy.Prowl, apikey=config[CONF_API_KEY])
)
return ProwlNotificationService(hass, prowl)


class ProwlNotificationService(BaseNotificationService):
"""Implement the notification service for Prowl."""

def __init__(self, hass, api_key):
def __init__(self, hass: HomeAssistant, prowl: prowlpy.Prowl) -> None:
"""Initialize the service."""
self._hass = hass
self._api_key = api_key
self._prowl = prowl

async def async_send_message(self, message, **kwargs):
async def async_send_message(self, message: str, **kwargs: Any) -> None:
"""Send the message to the user."""
response = None
session = None
url = f"{_RESOURCE}add"
data = kwargs.get(ATTR_DATA)
payload = {
"apikey": self._api_key,
"application": "Home-Assistant",
"event": kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
"description": message,
"priority": data["priority"] if data and "priority" in data else 0,
}
if data and data.get("url"):
payload["url"] = data["url"]

_LOGGER.debug("Attempting call Prowl service at %s", url)
session = async_get_clientsession(self._hass)
data = kwargs.get(ATTR_DATA, {})
if data is None:
data = {}

try:
async with asyncio.timeout(10):
response = await session.post(url, data=payload)
result = await response.text()

if response.status != HTTPStatus.OK or "error" in result:
_LOGGER.error(
"Prowl service returned http status %d, response %s",
response.status,
result,
await self._hass.async_add_executor_job(
partial(
self._prowl.send,
application="Home-Assistant",
event=kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
description=message,
priority=data.get("priority", 0),
url=data.get("url"),
)
)
except TimeoutError:
_LOGGER.error("Timeout accessing Prowl at %s", url)
except TimeoutError as ex:
_LOGGER.error("Timeout accessing Prowl API")
raise HomeAssistantError("Timeout accessing Prowl API") from ex
except prowlpy.APIError as ex:
if str(ex).startswith("Invalid API key"):
_LOGGER.error("Invalid API key for Prowl service")
raise HomeAssistantError("Invalid API key for Prowl service") from ex
if str(ex).startswith("Not accepted"):
_LOGGER.error("Prowl returned: exceeded rate limit")
raise HomeAssistantError(
"Prowl service reported: exceeded rate limit"
) from ex
_LOGGER.error("Unexpected error when calling Prowl API: %s", str(ex))
raise HomeAssistantError("Unexpected error when calling Prowl API") from ex
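The rewritten service replaces the hand-rolled aiohttp calls with the synchronous prowlpy client, pushed off the event loop via the executor. A self-contained sketch of that offload pattern, using asyncio's own run_in_executor and a hypothetical BlockingClient in place of prowlpy.Prowl:

import asyncio
from functools import partial


class BlockingClient:
    """Hypothetical synchronous client (stand-in for prowlpy.Prowl)."""

    def __init__(self, apikey: str) -> None:
        self.apikey = apikey

    def send(self, *, application: str, event: str, description: str) -> None:
        # Imagine a blocking HTTP request here.
        print(f"[{application}] {event}: {description}")


async def main() -> None:
    loop = asyncio.get_running_loop()
    # Construct the client off-loop, as hass.async_add_executor_job does.
    client = await loop.run_in_executor(None, partial(BlockingClient, apikey="secret"))
    # Keyword-only arguments require partial(); run_in_executor passes positionals only.
    await loop.run_in_executor(
        None,
        partial(
            client.send,
            application="Home-Assistant",
            event="Test",
            description="Hello",
        ),
    )


asyncio.run(main())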
@@ -16,6 +16,7 @@ from homeassistant.components.media_source import (
Unresolvable,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.location import vincenty

from . import RadioBrowserConfigEntry
from .const import DOMAIN

@@ -88,6 +89,7 @@ class RadioMediaSource(MediaSource):
*await self._async_build_popular(radios, item),
*await self._async_build_by_tag(radios, item),
*await self._async_build_by_language(radios, item),
*await self._async_build_local(radios, item),
*await self._async_build_by_country(radios, item),
],
)

@@ -292,3 +294,63 @@ class RadioMediaSource(MediaSource):
]

return []

def _filter_local_stations(
self, stations: list[Station], latitude: float, longitude: float
) -> list[Station]:
return [
station
for station in stations
if station.latitude is not None
and station.longitude is not None
and (
(
dist := vincenty(
(latitude, longitude),
(station.latitude, station.longitude),
False,
)
)
is not None
)
and dist < 100
]

async def _async_build_local(
self, radios: RadioBrowser, item: MediaSourceItem
) -> list[BrowseMediaSource]:
"""Handle browsing local radio stations."""

if item.identifier == "local":
country = self.hass.config.country
stations = await radios.stations(
filter_by=FilterBy.COUNTRY_CODE_EXACT,
filter_term=country,
hide_broken=True,
order=Order.NAME,
reverse=False,
)

local_stations = await self.hass.async_add_executor_job(
self._filter_local_stations,
stations,
self.hass.config.latitude,
self.hass.config.longitude,
)

return self._async_build_stations(radios, local_stations)

if not item.identifier:
return [
BrowseMediaSource(
domain=DOMAIN,
identifier="local",
media_class=MediaClass.DIRECTORY,
media_content_type=MediaType.MUSIC,
title="Local stations",
can_play=False,
can_expand=True,
)
]

return []
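The new `_async_build_local` branch boils down to a geo-filter: keep only stations whose coordinates lie within 100 km of the configured home location. A standalone sketch of that filter, substituting a plain haversine formula for Home Assistant's vincenty helper (Station here is an illustrative stand-in for the radios library type):

from dataclasses import dataclass
from math import asin, cos, radians, sin, sqrt


@dataclass
class Station:
    name: str
    latitude: float | None
    longitude: float | None


def haversine_km(a: tuple[float, float], b: tuple[float, float]) -> float:
    """Great-circle distance in kilometers."""
    lat1, lon1, lat2, lon2 = map(radians, (*a, *b))
    h = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return 2 * 6371 * asin(sqrt(h))


def filter_local_stations(
    stations: list[Station], latitude: float, longitude: float
) -> list[Station]:
    """Keep stations with coordinates within 100 km of home."""
    return [
        s
        for s in stations
        if s.latitude is not None
        and s.longitude is not None
        and haversine_km((latitude, longitude), (s.latitude, s.longitude)) < 100
    ]


stations = [
    Station("Nearby FM", 52.37, 4.90),
    Station("Far AM", 48.85, 2.35),
    Station("No coords", None, None),
]
print([s.name for s in filter_local_stations(stations, 52.35, 4.92)])  # ['Nearby FM']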
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aioridwell"],
"requirements": ["aioridwell==2024.01.0"]
"requirements": ["aioridwell==2025.09.0"]
}
@@ -1,59 +1,67 @@
"""Support for Satel Integra devices."""

import collections
import logging

from satel_integra.satel_integra import AsyncSatel
import voluptuous as vol

from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType

DEFAULT_ALARM_NAME = "satel_integra"
DEFAULT_PORT = 7094
DEFAULT_CONF_ARM_HOME_MODE = 1
DEFAULT_DEVICE_PARTITION = 1
DEFAULT_ZONE_TYPE = "motion"
from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_CONF_ARM_HOME_MODE,
DEFAULT_PORT,
DEFAULT_ZONE_TYPE,
DOMAIN,
SIGNAL_OUTPUTS_UPDATED,
SIGNAL_PANEL_MESSAGE,
SIGNAL_ZONES_UPDATED,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_PARTITION,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
SUBENTRY_TYPE_ZONE,
ZONES,
SatelConfigEntry,
)

_LOGGER = logging.getLogger(__name__)

DOMAIN = "satel_integra"
PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.SWITCH]

DATA_SATEL = "satel_integra"

CONF_DEVICE_CODE = "code"
CONF_DEVICE_PARTITIONS = "partitions"
CONF_ARM_HOME_MODE = "arm_home_mode"
CONF_ZONE_NAME = "name"
CONF_ZONE_TYPE = "type"
CONF_ZONES = "zones"
CONF_OUTPUTS = "outputs"
CONF_SWITCHABLE_OUTPUTS = "switchable_outputs"

ZONES = "zones"

SIGNAL_PANEL_MESSAGE = "satel_integra.panel_message"
SIGNAL_PANEL_ARM_AWAY = "satel_integra.panel_arm_away"
SIGNAL_PANEL_ARM_HOME = "satel_integra.panel_arm_home"
SIGNAL_PANEL_DISARM = "satel_integra.panel_disarm"

SIGNAL_ZONES_UPDATED = "satel_integra.zones_updated"
SIGNAL_OUTPUTS_UPDATED = "satel_integra.outputs_updated"

ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONE_NAME): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): cv.string,
}
)
EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_NAME): cv.string})
EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
PARTITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONE_NAME): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In(
[1, 2, 3]
),

@@ -63,7 +71,7 @@ PARTITION_SCHEMA = vol.Schema(

def is_alarm_code_necessary(value):
"""Check if alarm code must be configured."""
if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_DEVICE_CODE not in value:
if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_CODE not in value:
raise vol.Invalid("You need to specify alarm code to use switchable_outputs")

return value

@@ -75,7 +83,7 @@ CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_DEVICE_CODE): cv.string,
vol.Optional(CONF_CODE): cv.string,
vol.Optional(CONF_DEVICE_PARTITIONS, default={}): {
vol.Coerce(int): PARTITION_SCHEMA
},

@@ -92,64 +100,106 @@ CONFIG_SCHEMA = vol.Schema(
)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Satel Integra component."""
conf = config[DOMAIN]
async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
"""Set up Satel Integra from YAML."""

zones = conf.get(CONF_ZONES)
outputs = conf.get(CONF_OUTPUTS)
switchable_outputs = conf.get(CONF_SWITCHABLE_OUTPUTS)
host = conf.get(CONF_HOST)
port = conf.get(CONF_PORT)
partitions = conf.get(CONF_DEVICE_PARTITIONS)
if config := hass_config.get(DOMAIN):
hass.async_create_task(_async_import(hass, config))

monitored_outputs = collections.OrderedDict(
list(outputs.items()) + list(switchable_outputs.items())
return True


async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
"""Process YAML import."""

if not hass.config_entries.async_entries(DOMAIN):
# Start import flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)

if result.get("type") == FlowResultType.ABORT:
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_cannot_connect",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_cannot_connect",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Satel Integra",
},
)
return

ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Satel Integra",
},
)

controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions)

hass.data[DATA_SATEL] = controller
async def async_setup_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool:
"""Set up Satel Integra from a config entry."""

host = entry.data[CONF_HOST]
port = entry.data[CONF_PORT]

# Make sure we initialize the Satel controller with the configured entries to monitor
partitions = [
subentry.data[CONF_PARTITION_NUMBER]
for subentry in entry.subentries.values()
if subentry.subentry_type == SUBENTRY_TYPE_PARTITION
]

zones = [
subentry.data[CONF_ZONE_NUMBER]
for subentry in entry.subentries.values()
if subentry.subentry_type == SUBENTRY_TYPE_ZONE
]

outputs = [
subentry.data[CONF_OUTPUT_NUMBER]
for subentry in entry.subentries.values()
if subentry.subentry_type == SUBENTRY_TYPE_OUTPUT
]

switchable_outputs = [
subentry.data[CONF_SWITCHABLE_OUTPUT_NUMBER]
for subentry in entry.subentries.values()
if subentry.subentry_type == SUBENTRY_TYPE_SWITCHABLE_OUTPUT
]

monitored_outputs = outputs + switchable_outputs

controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions)

result = await controller.connect()

if not result:
return False
raise ConfigEntryNotReady("Controller failed to connect")

entry.runtime_data = controller

@callback
def _close(*_):
controller.close()

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close)
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))

_LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE))

hass.async_create_task(
async_load_platform(hass, Platform.ALARM_CONTROL_PANEL, DOMAIN, conf, config)
)

hass.async_create_task(
async_load_platform(
hass,
Platform.BINARY_SENSOR,
DOMAIN,
{CONF_ZONES: zones, CONF_OUTPUTS: outputs},
config,
)
)

hass.async_create_task(
async_load_platform(
hass,
Platform.SWITCH,
DOMAIN,
{
CONF_SWITCHABLE_OUTPUTS: switchable_outputs,
CONF_DEVICE_CODE: conf.get(CONF_DEVICE_CODE),
},
config,
)
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@callback
def alarm_status_update_callback():

@@ -179,3 +229,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)

return True


async def async_unload_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool:
"""Unloading the Satel platforms."""

if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
controller = entry.runtime_data
controller.close()

return unload_ok
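The core of the new `async_setup_entry` is deriving the monitored partition, zone, and output numbers from typed config subentries. A runnable sketch of that selection with plain dataclasses standing in for Home Assistant's ConfigSubentry objects (names are illustrative):

from dataclasses import dataclass
from typing import Any


@dataclass
class Subentry:
    subentry_type: str
    data: dict[str, Any]


subentries = [
    Subentry("partition", {"partition_number": 1}),
    Subentry("zone", {"zone_number": 3}),
    Subentry("output", {"output_number": 7}),
    Subentry("switchable_output", {"switchable_output_number": 9}),
]


def collect(kind: str, key: str) -> list[int]:
    """Gather the configured numbers for one subentry type."""
    return [s.data[key] for s in subentries if s.subentry_type == kind]


partitions = collect("partition", "partition_number")
zones = collect("zone", "zone_number")
# Regular and switchable outputs are monitored together, as in the diff.
monitored_outputs = collect("output", "output_number") + collect(
    "switchable_output", "switchable_output_number"
)
print(partitions, zones, monitored_outputs)  # [1] [3] [7, 9]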
@@ -14,46 +14,49 @@ from homeassistant.components.alarm_control_panel import (
AlarmControlPanelState,
CodeFormat,
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import (
from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_ZONE_NAME,
DATA_SATEL,
CONF_PARTITION_NUMBER,
SIGNAL_PANEL_MESSAGE,
SUBENTRY_TYPE_PARTITION,
SatelConfigEntry,
)

_LOGGER = logging.getLogger(__name__)


async def async_setup_platform(
async def async_setup_entry(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
config_entry: SatelConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up for Satel Integra alarm panels."""
if not discovery_info:
return

configured_partitions = discovery_info[CONF_DEVICE_PARTITIONS]
controller = hass.data[DATA_SATEL]
controller = config_entry.runtime_data

devices = []
partition_subentries = filter(
lambda entry: entry.subentry_type == SUBENTRY_TYPE_PARTITION,
config_entry.subentries.values(),
)

for partition_num, device_config_data in configured_partitions.items():
zone_name = device_config_data[CONF_ZONE_NAME]
arm_home_mode = device_config_data.get(CONF_ARM_HOME_MODE)
device = SatelIntegraAlarmPanel(
controller, zone_name, arm_home_mode, partition_num
for subentry in partition_subentries:
partition_num = subentry.data[CONF_PARTITION_NUMBER]
zone_name = subentry.data[CONF_NAME]
arm_home_mode = subentry.data[CONF_ARM_HOME_MODE]

async_add_entities(
[
SatelIntegraAlarmPanel(
controller, zone_name, arm_home_mode, partition_num
)
],
config_subentry_id=subentry.subentry_id,
)
devices.append(device)

async_add_entities(devices)


class SatelIntegraAlarmPanel(AlarmControlPanelEntity):

@@ -66,7 +69,7 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity):
| AlarmControlPanelEntityFeature.ARM_AWAY
)

def __init__(self, controller, name, arm_home_mode, partition_id):
def __init__(self, controller, name, arm_home_mode, partition_id) -> None:
"""Initialize the alarm panel."""
self._attr_name = name
self._attr_unique_id = f"satel_alarm_panel_{partition_id}"
@@ -6,61 +6,79 @@ from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import (
CONF_OUTPUTS,
CONF_ZONE_NAME,
from .const import (
CONF_OUTPUT_NUMBER,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DATA_SATEL,
SIGNAL_OUTPUTS_UPDATED,
SIGNAL_ZONES_UPDATED,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_ZONE,
SatelConfigEntry,
)


async def async_setup_platform(
async def async_setup_entry(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
config_entry: SatelConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Satel Integra binary sensor devices."""
if not discovery_info:
return

configured_zones = discovery_info[CONF_ZONES]
controller = hass.data[DATA_SATEL]
controller = config_entry.runtime_data

devices = []
zone_subentries = filter(
lambda entry: entry.subentry_type == SUBENTRY_TYPE_ZONE,
config_entry.subentries.values(),
)

for zone_num, device_config_data in configured_zones.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
controller, zone_num, zone_name, zone_type, CONF_ZONES, SIGNAL_ZONES_UPDATED
for subentry in zone_subentries:
zone_num = subentry.data[CONF_ZONE_NUMBER]
zone_type = subentry.data[CONF_ZONE_TYPE]
zone_name = subentry.data[CONF_NAME]

async_add_entities(
[
SatelIntegraBinarySensor(
controller,
zone_num,
zone_name,
zone_type,
SUBENTRY_TYPE_ZONE,
SIGNAL_ZONES_UPDATED,
)
],
config_subentry_id=subentry.subentry_id,
)
devices.append(device)

configured_outputs = discovery_info[CONF_OUTPUTS]
output_subentries = filter(
lambda entry: entry.subentry_type == SUBENTRY_TYPE_OUTPUT,
config_entry.subentries.values(),
)

for zone_num, device_config_data in configured_outputs.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
controller,
zone_num,
zone_name,
zone_type,
CONF_OUTPUTS,
SIGNAL_OUTPUTS_UPDATED,
for subentry in output_subentries:
output_num = subentry.data[CONF_OUTPUT_NUMBER]
output_type = subentry.data[CONF_ZONE_TYPE]
output_name = subentry.data[CONF_NAME]

async_add_entities(
[
SatelIntegraBinarySensor(
controller,
output_num,
output_name,
output_type,
SUBENTRY_TYPE_OUTPUT,
SIGNAL_OUTPUTS_UPDATED,
)
],
config_subentry_id=subentry.subentry_id,
)
devices.append(device)

async_add_entities(devices)


class SatelIntegraBinarySensor(BinarySensorEntity):
496
homeassistant/components/satel_integra/config_flow.py
Normal file
@@ -0,0 +1,496 @@
"""Config flow for Satel Integra."""

from __future__ import annotations

import logging
from typing import Any

from satel_integra.satel_integra import AsyncSatel
import voluptuous as vol

from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryData,
ConfigSubentryFlow,
OptionsFlowWithReload,
SubentryFlowResult,
)
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, selector

from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_CONF_ARM_HOME_MODE,
DEFAULT_PORT,
DOMAIN,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_PARTITION,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
SUBENTRY_TYPE_ZONE,
SatelConfigEntry,
)

_LOGGER = logging.getLogger(__package__)

CONNECTION_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_CODE): cv.string,
}
)

CODE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_CODE): cv.string,
}
)

PARTITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In(
[1, 2, 3]
),
}
)

ZONE_AND_OUTPUT_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(
CONF_ZONE_TYPE, default=BinarySensorDeviceClass.MOTION
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=[cls.value for cls in BinarySensorDeviceClass],
mode=selector.SelectSelectorMode.DROPDOWN,
translation_key="binary_sensor_device_class",
sort=True,
),
),
}
)

SWITCHABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})


class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a Satel Integra config flow."""

VERSION = 1

@staticmethod
@callback
def async_get_options_flow(
config_entry: SatelConfigEntry,
) -> SatelOptionsFlow:
"""Create the options flow."""
return SatelOptionsFlow()

@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {
SUBENTRY_TYPE_PARTITION: PartitionSubentryFlowHandler,
SUBENTRY_TYPE_ZONE: ZoneSubentryFlowHandler,
SUBENTRY_TYPE_OUTPUT: OutputSubentryFlowHandler,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT: SwitchableOutputSubentryFlowHandler,
}

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
errors: dict[str, str] = {}

if user_input is not None:
valid = await self.test_connection(
user_input[CONF_HOST], user_input[CONF_PORT]
)

if valid:
return self.async_create_entry(
title=user_input[CONF_HOST],
data={
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
},
options={CONF_CODE: user_input.get(CONF_CODE)},
)

errors["base"] = "cannot_connect"

return self.async_show_form(
step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors
)

async def async_step_import(
self, import_config: dict[str, Any]
) -> ConfigFlowResult:
"""Handle a flow initialized by import."""

valid = await self.test_connection(
import_config[CONF_HOST], import_config.get(CONF_PORT, DEFAULT_PORT)
)

if valid:
subentries: list[ConfigSubentryData] = []

for partition_number, partition_data in import_config.get(
CONF_DEVICE_PARTITIONS, {}
).items():
subentries.append(
{
"subentry_type": SUBENTRY_TYPE_PARTITION,
"title": partition_data[CONF_NAME],
"unique_id": f"{SUBENTRY_TYPE_PARTITION}_{partition_number}",
"data": {
CONF_NAME: partition_data[CONF_NAME],
CONF_ARM_HOME_MODE: partition_data.get(
CONF_ARM_HOME_MODE, DEFAULT_CONF_ARM_HOME_MODE
),
CONF_PARTITION_NUMBER: partition_number,
},
}
)

for zone_number, zone_data in import_config.get(CONF_ZONES, {}).items():
subentries.append(
{
"subentry_type": SUBENTRY_TYPE_ZONE,
"title": zone_data[CONF_NAME],
"unique_id": f"{SUBENTRY_TYPE_ZONE}_{zone_number}",
"data": {
CONF_NAME: zone_data[CONF_NAME],
CONF_ZONE_NUMBER: zone_number,
CONF_ZONE_TYPE: zone_data.get(
CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
),
},
}
)

for output_number, output_data in import_config.get(
CONF_OUTPUTS, {}
).items():
subentries.append(
{
"subentry_type": SUBENTRY_TYPE_OUTPUT,
"title": output_data[CONF_NAME],
"unique_id": f"{SUBENTRY_TYPE_OUTPUT}_{output_number}",
"data": {
CONF_NAME: output_data[CONF_NAME],
CONF_OUTPUT_NUMBER: output_number,
CONF_ZONE_TYPE: output_data.get(
CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
),
},
}
)

for switchable_output_number, switchable_output_data in import_config.get(
CONF_SWITCHABLE_OUTPUTS, {}
).items():
subentries.append(
{
"subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
"title": switchable_output_data[CONF_NAME],
"unique_id": f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{switchable_output_number}",
"data": {
CONF_NAME: switchable_output_data[CONF_NAME],
CONF_SWITCHABLE_OUTPUT_NUMBER: switchable_output_number,
},
}
)

return self.async_create_entry(
title=import_config[CONF_HOST],
data={
CONF_HOST: import_config[CONF_HOST],
CONF_PORT: import_config.get(CONF_PORT, DEFAULT_PORT),
},
options={CONF_CODE: import_config.get(CONF_CODE)},
subentries=subentries,
)

return self.async_abort(reason="cannot_connect")

async def test_connection(self, host: str, port: int) -> bool:
"""Test a connection to the Satel alarm."""
controller = AsyncSatel(host, port, self.hass.loop)

result = await controller.connect()

# Make sure we close the connection again
controller.close()

return result


class SatelOptionsFlow(OptionsFlowWithReload):
"""Handle Satel options flow."""

async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Init step."""
if user_input is not None:
return self.async_create_entry(data={CONF_CODE: user_input.get(CONF_CODE)})

return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
CODE_SCHEMA, self.config_entry.options
),
)


class PartitionSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow for adding and modifying a partition."""

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""User flow to add new partition."""
errors: dict[str, str] = {}

if user_input is not None:
unique_id = f"{SUBENTRY_TYPE_PARTITION}_{user_input[CONF_PARTITION_NUMBER]}"

for existing_subentry in self._get_entry().subentries.values():
if existing_subentry.unique_id == unique_id:
errors[CONF_PARTITION_NUMBER] = "already_configured"

if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input, unique_id=unique_id
)

return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(CONF_PARTITION_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
).extend(PARTITION_SCHEMA.schema),
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Reconfigure existing partition."""
subconfig_entry = self._get_reconfigure_subentry()

if user_input is not None:
return self.async_update_and_abort(
self._get_entry(),
subconfig_entry,
title=user_input[CONF_NAME],
data_updates=user_input,
)

return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
PARTITION_SCHEMA,
subconfig_entry.data,
),
description_placeholders={
CONF_PARTITION_NUMBER: subconfig_entry.data[CONF_PARTITION_NUMBER]
},
)


class ZoneSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow for adding and modifying a zone."""

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""User flow to add new zone."""
errors: dict[str, str] = {}

if user_input is not None:
unique_id = f"{SUBENTRY_TYPE_ZONE}_{user_input[CONF_ZONE_NUMBER]}"

for existing_subentry in self._get_entry().subentries.values():
if existing_subentry.unique_id == unique_id:
errors[CONF_ZONE_NUMBER] = "already_configured"

if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input, unique_id=unique_id
)

return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(CONF_ZONE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
).extend(ZONE_AND_OUTPUT_SCHEMA.schema),
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Reconfigure existing zone."""
subconfig_entry = self._get_reconfigure_subentry()

if user_input is not None:
return self.async_update_and_abort(
self._get_entry(),
subconfig_entry,
title=user_input[CONF_NAME],
data_updates=user_input,
)

return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data
),
description_placeholders={
CONF_ZONE_NUMBER: subconfig_entry.data[CONF_ZONE_NUMBER]
},
)

class OutputSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow for adding and modifying an output."""

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""User flow to add new output."""
errors: dict[str, str] = {}

if user_input is not None:
unique_id = f"{SUBENTRY_TYPE_OUTPUT}_{user_input[CONF_OUTPUT_NUMBER]}"

for existing_subentry in self._get_entry().subentries.values():
if existing_subentry.unique_id == unique_id:
errors[CONF_OUTPUT_NUMBER] = "already_configured"

if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input, unique_id=unique_id
)

return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(CONF_OUTPUT_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
).extend(ZONE_AND_OUTPUT_SCHEMA.schema),
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Reconfigure existing output."""
subconfig_entry = self._get_reconfigure_subentry()

if user_input is not None:
return self.async_update_and_abort(
self._get_entry(),
subconfig_entry,
title=user_input[CONF_NAME],
data_updates=user_input,
)

return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data
),
description_placeholders={
CONF_OUTPUT_NUMBER: subconfig_entry.data[CONF_OUTPUT_NUMBER]
},
)


class SwitchableOutputSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow for adding and modifying a switchable output."""

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""User flow to add new switchable output."""
errors: dict[str, str] = {}

if user_input is not None:
unique_id = f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{user_input[CONF_SWITCHABLE_OUTPUT_NUMBER]}"

for existing_subentry in self._get_entry().subentries.values():
if existing_subentry.unique_id == unique_id:
errors[CONF_SWITCHABLE_OUTPUT_NUMBER] = "already_configured"

if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input, unique_id=unique_id
)

return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(CONF_SWITCHABLE_OUTPUT_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
).extend(SWITCHABLE_OUTPUT_SCHEMA.schema),
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Reconfigure existing switchable output."""
subconfig_entry = self._get_reconfigure_subentry()

if user_input is not None:
return self.async_update_and_abort(
self._get_entry(),
subconfig_entry,
title=user_input[CONF_NAME],
data_updates=user_input,
)

return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
SWITCHABLE_OUTPUT_SCHEMA, subconfig_entry.data
),
description_placeholders={
CONF_SWITCHABLE_OUTPUT_NUMBER: subconfig_entry.data[
CONF_SWITCHABLE_OUTPUT_NUMBER
]
},
)
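Each subentry flow guards against duplicates the same way: it derives a deterministic unique_id from the subentry type plus the user-chosen number and rejects the input when an existing subentry already claims it. A distilled, runnable version of that check (helper names are illustrative; the real flows use ConfigSubentryFlow helpers):

def validate_new_subentry(
    subentry_type: str, number: int, existing_unique_ids: set[str]
) -> dict[str, str]:
    """Return an errors dict in the style of a config flow step."""
    errors: dict[str, str] = {}
    unique_id = f"{subentry_type}_{number}"
    if unique_id in existing_unique_ids:
        errors["base"] = "already_configured"
    return errors


existing = {"zone_1", "partition_1"}
print(validate_new_subentry("zone", 1, existing))  # {'base': 'already_configured'}
print(validate_new_subentry("zone", 2, existing))  # {}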
38
homeassistant/components/satel_integra/const.py
Normal file
@@ -0,0 +1,38 @@
"""Constants for the Satel Integra integration."""

from satel_integra.satel_integra import AsyncSatel

from homeassistant.config_entries import ConfigEntry

DEFAULT_CONF_ARM_HOME_MODE = 1
DEFAULT_PORT = 7094
DEFAULT_ZONE_TYPE = "motion"

DOMAIN = "satel_integra"

SUBENTRY_TYPE_PARTITION = "partition"
SUBENTRY_TYPE_ZONE = "zone"
SUBENTRY_TYPE_OUTPUT = "output"
SUBENTRY_TYPE_SWITCHABLE_OUTPUT = "switchable_output"

CONF_PARTITION_NUMBER = "partition_number"
CONF_ZONE_NUMBER = "zone_number"
CONF_OUTPUT_NUMBER = "output_number"
CONF_SWITCHABLE_OUTPUT_NUMBER = "switchable_output_number"

CONF_DEVICE_PARTITIONS = "partitions"
CONF_ARM_HOME_MODE = "arm_home_mode"
CONF_ZONE_TYPE = "type"
CONF_ZONES = "zones"
CONF_OUTPUTS = "outputs"
CONF_SWITCHABLE_OUTPUTS = "switchable_outputs"

ZONES = "zones"


SIGNAL_PANEL_MESSAGE = "satel_integra.panel_message"

SIGNAL_ZONES_UPDATED = "satel_integra.zones_updated"
SIGNAL_OUTPUTS_UPDATED = "satel_integra.outputs_updated"

type SatelConfigEntry = ConfigEntry[AsyncSatel]
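The last line of the new const.py does real work: a PEP 695 type alias that parameterizes ConfigEntry so entry.runtime_data is statically known to hold the AsyncSatel controller. A toy generic stand-in showing the same idea (requires Python 3.12+; ConfigEntry here is not the Home Assistant class):

from dataclasses import dataclass, field
from typing import Any


class Controller:
    """Stand-in for the AsyncSatel controller."""

    def close(self) -> None:
        print("connection closed")


@dataclass
class ConfigEntry[T]:
    data: dict[str, Any] = field(default_factory=dict)
    runtime_data: T | None = None


# The alias documents what this integration stores in runtime_data.
type SatelEntry = ConfigEntry[Controller]

entry: SatelEntry = ConfigEntry(data={"host": "192.168.1.2", "port": 7094})
entry.runtime_data = Controller()
entry.runtime_data.close()  # type checkers know this is a Controller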
@@ -1,10 +1,12 @@
{
"domain": "satel_integra",
"name": "Satel Integra",
"codeowners": [],
"codeowners": ["@Tommatheussen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/satel_integra",
"iot_class": "local_push",
"loggers": ["satel_integra"],
"quality_scale": "legacy",
"requirements": ["satel-integra==0.3.7"]
"requirements": ["satel-integra==0.3.7"],
"single_config_entry": true
}
210
homeassistant/components/satel_integra/strings.json
Normal file
@@ -0,0 +1,210 @@
{
"common": {
"code_input_description": "Code to toggle switchable outputs",
"code": "Access code"
},
"config": {
"step": {
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
"code": "[%key:component::satel_integra::common::code%]"
},
"data_description": {
"host": "The IP address of the alarm panel",
"port": "The port of the alarm panel",
"code": "[%key:component::satel_integra::common::code_input_description%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
}
},
"config_subentries": {
"partition": {
"initiate_flow": {
"user": "Add partition"
},
"step": {
"user": {
"title": "Configure partition",
"data": {
"partition_number": "Partition number",
"name": "[%key:common::config_flow::data::name%]",
"arm_home_mode": "Arm home mode"
},
"data_description": {
"partition_number": "Enter partition number to configure",
"name": "The name to give to the alarm panel",
"arm_home_mode": "The mode in which the partition is armed when 'arm home' is used. For more information on what the differences are between them, please refer to Satel Integra manual."
}
},
"reconfigure": {
"title": "Reconfigure partition {partition_number}",
"data": {
"name": "[%key:common::config_flow::data::name%]",
"arm_home_mode": "[%key:component::satel_integra::config_subentries::partition::step::user::data::arm_home_mode%]"
},
"data_description": {
"arm_home_mode": "[%key:component::satel_integra::config_subentries::partition::step::user::data_description::arm_home_mode%]"
}
}
},
"error": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"zone": {
"initiate_flow": {
"user": "Add zone"
},
"step": {
"user": {
"title": "Configure zone",
"data": {
"zone_number": "Zone number",
"name": "[%key:common::config_flow::data::name%]",
"type": "Zone type"
},
"data_description": {
"zone_number": "Enter zone number to configure",
"name": "The name to give to the sensor",
"type": "Choose the device class you would like the sensor to show as"
}
},
"reconfigure": {
"title": "Reconfigure zone {zone_number}",
"data": {
"name": "[%key:common::config_flow::data::name%]",
"type": "[%key:component::satel_integra::config_subentries::zone::step::user::data::type%]"
},
"data_description": {
"name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]",
"type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]"
}
}
},
"error": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"output": {
"initiate_flow": {
"user": "Add output"
},
"step": {
"user": {
"title": "Configure output",
"data": {
"output_number": "Output number",
"name": "[%key:common::config_flow::data::name%]",
"type": "Output type"
},
"data_description": {
"output_number": "Enter output number to configure",
"name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]",
"type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]"
}
},
"reconfigure": {
"title": "Reconfigure output {output_number}",
"data": {
"name": "[%key:common::config_flow::data::name%]",
"type": "[%key:component::satel_integra::config_subentries::output::step::user::data::type%]"
},
"data_description": {
"name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]",
"type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]"
}
}
},
"error": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"switchable_output": {
"initiate_flow": {
"user": "Add switchable output"
},
"step": {
"user": {
"title": "Configure switchable output",
"data": {
"switchable_output_number": "Switchable output number",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"switchable_output_number": "Enter switchable output number to configure",
"name": "The name to give to the switch"
}
},
"reconfigure": {
"title": "Reconfigure switchable output {switchable_output_number}",
"data": {
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"name": "[%key:component::satel_integra::config_subentries::switchable_output::step::user::data_description::name%]"
}
}
},
"error": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
}
},
"options": {
"step": {
"init": {
"data": {
"code": "[%key:component::satel_integra::common::code%]"
},
"data_description": {
"code": "[%key:component::satel_integra::common::code_input_description%]"
}
}
}
},
"issues": {
|
||||
"deprecated_yaml_import_issue_cannot_connect": {
|
||||
"title": "YAML import failed due to a connection error",
|
||||
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your existing configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the `{domain}` YAML configuration from your configuration.yaml file and add the {integration_title} integration manually."
|
||||
}
|
||||
},
"selector": {
"binary_sensor_device_class": {
"options": {
"battery": "[%key:component::binary_sensor::entity_component::battery::name%]",
"battery_charging": "[%key:component::binary_sensor::entity_component::battery_charging::name%]",
"carbon_monoxide": "[%key:component::binary_sensor::entity_component::carbon_monoxide::name%]",
"cold": "[%key:component::binary_sensor::entity_component::cold::name%]",
"connectivity": "[%key:component::binary_sensor::entity_component::connectivity::name%]",
"door": "[%key:component::binary_sensor::entity_component::door::name%]",
"garage_door": "[%key:component::binary_sensor::entity_component::garage_door::name%]",
"gas": "[%key:component::binary_sensor::entity_component::gas::name%]",
"heat": "[%key:component::binary_sensor::entity_component::heat::name%]",
"light": "[%key:component::binary_sensor::entity_component::light::name%]",
"lock": "[%key:component::binary_sensor::entity_component::lock::name%]",
"moisture": "[%key:component::binary_sensor::entity_component::moisture::name%]",
"motion": "[%key:component::binary_sensor::entity_component::motion::name%]",
"moving": "[%key:component::binary_sensor::entity_component::moving::name%]",
"occupancy": "[%key:component::binary_sensor::entity_component::occupancy::name%]",
"opening": "[%key:component::binary_sensor::entity_component::opening::name%]",
"plug": "[%key:component::binary_sensor::entity_component::plug::name%]",
"power": "[%key:component::binary_sensor::entity_component::power::name%]",
"presence": "[%key:component::binary_sensor::entity_component::presence::name%]",
"problem": "[%key:component::binary_sensor::entity_component::problem::name%]",
"running": "[%key:component::binary_sensor::entity_component::running::name%]",
"safety": "[%key:component::binary_sensor::entity_component::safety::name%]",
"smoke": "[%key:component::binary_sensor::entity_component::smoke::name%]",
"sound": "[%key:component::binary_sensor::entity_component::sound::name%]",
"tamper": "[%key:component::binary_sensor::entity_component::tamper::name%]",
"update": "[%key:component::binary_sensor::entity_component::update::name%]",
"vibration": "[%key:component::binary_sensor::entity_component::vibration::name%]",
"window": "[%key:component::binary_sensor::entity_component::window::name%]"
}
}
}
}
@@ -6,48 +6,50 @@ import logging
from typing import Any

from homeassistant.components.switch import SwitchEntity
from homeassistant.const import CONF_CODE, CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import (
CONF_DEVICE_CODE,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NAME,
DATA_SATEL,
from .const import (
CONF_SWITCHABLE_OUTPUT_NUMBER,
SIGNAL_OUTPUTS_UPDATED,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
SatelConfigEntry,
)

_LOGGER = logging.getLogger(__name__)

DEPENDENCIES = ["satel_integra"]


async def async_setup_platform(
async def async_setup_entry(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
config_entry: SatelConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Satel Integra switch devices."""
if not discovery_info:
return

configured_zones = discovery_info[CONF_SWITCHABLE_OUTPUTS]
controller = hass.data[DATA_SATEL]
controller = config_entry.runtime_data

devices = []
switchable_output_subentries = filter(
lambda entry: entry.subentry_type == SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
config_entry.subentries.values(),
)

for zone_num, device_config_data in configured_zones.items():
zone_name = device_config_data[CONF_ZONE_NAME]
for subentry in switchable_output_subentries:
switchable_output_num = subentry.data[CONF_SWITCHABLE_OUTPUT_NUMBER]
switchable_output_name = subentry.data[CONF_NAME]

device = SatelIntegraSwitch(
controller, zone_num, zone_name, discovery_info[CONF_DEVICE_CODE]
async_add_entities(
[
SatelIntegraSwitch(
controller,
switchable_output_num,
switchable_output_name,
config_entry.options.get(CONF_CODE),
),
],
config_subentry_id=subentry.subentry_id,
)
devices.append(device)

async_add_entities(devices)


class SatelIntegraSwitch(SwitchEntity):
@@ -1445,7 +1445,7 @@ RPC_SENSORS: Final = {
"illuminance_illumination": RpcSensorDescription(
key="illuminance",
sub_key="illumination",
name="Illuminance Level",
name="Illuminance level",
translation_key="illuminance_level",
device_class=SensorDeviceClass.ENUM,
options=["dark", "twilight", "bright"],
@@ -7,7 +7,7 @@ from typing import cast

import voluptuous as vol

from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.const import SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (

@@ -25,7 +25,6 @@ _CONDITION_SCHEMA = vol.All(
vol.Schema(
{
**cv.CONDITION_BASE_SCHEMA,
vol.Required(CONF_CONDITION): "sun",
vol.Optional("before"): cv.sun_event,
vol.Optional("before_offset"): cv.time_period,
vol.Optional("after"): vol.All(
|
@@ -98,6 +98,7 @@ PLATFORMS_BY_TYPE = {
    SupportedModels.RGBICWW_FLOOR_LAMP.value: [Platform.LIGHT, Platform.SENSOR],
    SupportedModels.RGBICWW_STRIP_LIGHT.value: [Platform.LIGHT, Platform.SENSOR],
    SupportedModels.PLUG_MINI_EU.value: [Platform.SWITCH, Platform.SENSOR],
    SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR],
}
CLASS_BY_DEVICE = {
    SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
@@ -129,6 +130,7 @@ CLASS_BY_DEVICE = {
    SupportedModels.RGBICWW_FLOOR_LAMP.value: switchbot.SwitchbotRgbicLight,
    SupportedModels.RGBICWW_STRIP_LIGHT.value: switchbot.SwitchbotRgbicLight,
    SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch,
    SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM,
}


@@ -54,6 +54,7 @@ class SupportedModels(StrEnum):
    RGBICWW_STRIP_LIGHT = "rgbicww_strip_light"
    RGBICWW_FLOOR_LAMP = "rgbicww_floor_lamp"
    PLUG_MINI_EU = "plug_mini_eu"
    RELAY_SWITCH_2PM = "relay_switch_2pm"


CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -87,6 +88,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
    SwitchbotModel.RGBICWW_STRIP_LIGHT: SupportedModels.RGBICWW_STRIP_LIGHT,
    SwitchbotModel.RGBICWW_FLOOR_LAMP: SupportedModels.RGBICWW_FLOOR_LAMP,
    SwitchbotModel.PLUG_MINI_EU: SupportedModels.PLUG_MINI_EU,
    SwitchbotModel.RELAY_SWITCH_2PM: SupportedModels.RELAY_SWITCH_2PM,
}

NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -121,6 +123,7 @@ ENCRYPTED_MODELS = {
    SwitchbotModel.RGBICWW_STRIP_LIGHT,
    SwitchbotModel.RGBICWW_FLOOR_LAMP,
    SwitchbotModel.PLUG_MINI_EU,
    SwitchbotModel.RELAY_SWITCH_2PM,
}

ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
@@ -140,6 +143,7 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
    SwitchbotModel.RGBICWW_STRIP_LIGHT: switchbot.SwitchbotRgbicLight,
    SwitchbotModel.RGBICWW_FLOOR_LAMP: switchbot.SwitchbotRgbicLight,
    SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch,
    SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM,
}

HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
@@ -6,6 +6,7 @@ from collections.abc import Callable, Coroutine, Mapping
import logging
from typing import Any, Concatenate

import switchbot
from switchbot import Switchbot, SwitchbotDevice
from switchbot.devices.device import SwitchbotOperationError

@@ -46,6 +47,7 @@ class SwitchbotEntity(
            model=coordinator.model,  # Sometimes the modelName is missing from the advertisement data
            name=coordinator.device_name,
        )
        self._channel: int | None = None
        if ":" not in self._address:
            # MacOS Bluetooth addresses are not mac addresses
            return
@@ -60,6 +62,8 @@ class SwitchbotEntity(
    @property
    def parsed_data(self) -> dict[str, Any]:
        """Return parsed device data for this entity."""
        if isinstance(self.coordinator.device, switchbot.SwitchbotRelaySwitch2PM):
            return self.coordinator.device.get_parsed_data(self._channel)
        return self.coordinator.device.parsed_data

    @property
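The parsed_data property above picks a per-channel slice for the 2PM relay and falls back to the device's flat mapping otherwise. The same dispatch pattern in isolation, with made-up device classes standing in for the switchbot library:

from typing import Any

class SingleChannelDevice:
    parsed_data: dict[str, Any] = {"power": 5}

class TwoChannelDevice:
    _data = {1: {"power": 5}, 2: {"power": 7}}

    def get_parsed_data(self, channel: int) -> dict[str, Any]:
        return self._data[channel]

def parsed_data_for(device: Any, channel: int | None) -> dict[str, Any]:
    # Mirrors the entity property: multi-channel devices expose a
    # per-channel accessor, everything else a flat mapping.
    if isinstance(device, TwoChannelDevice):
        return device.get_parsed_data(channel)
    return device.parsed_data

assert parsed_data_for(TwoChannelDevice(), 2) == {"power": 7}
assert parsed_data_for(SingleChannelDevice(), None) == {"power": 5}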
@@ -2,6 +2,7 @@

from __future__ import annotations

import switchbot
from switchbot import HumidifierWaterLevel
from switchbot.const.air_purifier import AirQualityLevel

@@ -25,8 +26,10 @@ from homeassistant.const import (
    UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator
from .entity import SwitchbotEntity

@@ -133,13 +136,22 @@ async def async_setup_entry(
) -> None:
    """Set up Switchbot sensor based on a config entry."""
    coordinator = entry.runtime_data
    entities = [
        SwitchBotSensor(coordinator, sensor)
        for sensor in coordinator.device.parsed_data
        if sensor in SENSOR_TYPES
    ]
    entities.append(SwitchbotRSSISensor(coordinator, "rssi"))
    async_add_entities(entities)
    sensor_entities: list[SensorEntity] = []
    if isinstance(coordinator.device, switchbot.SwitchbotRelaySwitch2PM):
        sensor_entities.extend(
            SwitchBotSensor(coordinator, sensor, channel)
            for channel in range(1, coordinator.device.channel + 1)
            for sensor in coordinator.device.get_parsed_data(channel)
            if sensor in SENSOR_TYPES
        )
    else:
        sensor_entities.extend(
            SwitchBotSensor(coordinator, sensor)
            for sensor in coordinator.device.parsed_data
            if sensor in SENSOR_TYPES
        )
    sensor_entities.append(SwitchbotRSSISensor(coordinator, "rssi"))
    async_add_entities(sensor_entities)


class SwitchBotSensor(SwitchbotEntity, SensorEntity):
@@ -149,13 +161,27 @@ class SwitchBotSensor(SwitchbotEntity, SensorEntity):
        self,
        coordinator: SwitchbotDataUpdateCoordinator,
        sensor: str,
        channel: int | None = None,
    ) -> None:
        """Initialize the Switchbot sensor."""
        super().__init__(coordinator)
        self._sensor = sensor
        self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}"
        self._channel = channel
        self.entity_description = SENSOR_TYPES[sensor]

        if channel:
            self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}-{channel}"
            self._attr_device_info = DeviceInfo(
                identifiers={
                    (DOMAIN, f"{coordinator.base_unique_id}-channel-{channel}")
                },
                manufacturer="SwitchBot",
                model_id="RelaySwitch2PM",
                name=f"{coordinator.device_name} Channel {channel}",
            )
        else:
            self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}"

    @property
    def native_value(self) -> str | int | None:
        """Return the state of the sensor."""
@@ -2,6 +2,7 @@

from __future__ import annotations

import logging
from typing import Any

import switchbot
@@ -9,13 +10,16 @@ import switchbot
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.const import STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity

from .const import DOMAIN
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator
from .entity import SwitchbotSwitchedEntity
from .entity import SwitchbotSwitchedEntity, exception_handler

PARALLEL_UPDATES = 0
_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
@@ -24,7 +28,16 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Switchbot based on a config entry."""
    async_add_entities([SwitchBotSwitch(entry.runtime_data)])
    coordinator = entry.runtime_data

    if isinstance(coordinator.device, switchbot.SwitchbotRelaySwitch2PM):
        entries = [
            SwitchbotMultiChannelSwitch(coordinator, channel)
            for channel in range(1, coordinator.device.channel + 1)
        ]
        async_add_entities(entries)
    else:
        async_add_entities([SwitchBotSwitch(coordinator)])


class SwitchBotSwitch(SwitchbotSwitchedEntity, SwitchEntity, RestoreEntity):
@@ -67,3 +80,49 @@ class SwitchBotSwitch(SwitchbotSwitchedEntity, SwitchEntity, RestoreEntity):
            **super().extra_state_attributes,
            "switch_mode": self._device.switch_mode(),
        }


class SwitchbotMultiChannelSwitch(SwitchbotSwitchedEntity, SwitchEntity):
    """Representation of a Switchbot multi-channel switch."""

    _attr_device_class = SwitchDeviceClass.SWITCH
    _device: switchbot.Switchbot
    _attr_name = None

    def __init__(
        self, coordinator: SwitchbotDataUpdateCoordinator, channel: int
    ) -> None:
        """Initialize the Switchbot."""
        super().__init__(coordinator)
        self._channel = channel
        self._attr_unique_id = f"{coordinator.base_unique_id}-{channel}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{coordinator.base_unique_id}-channel-{channel}")},
            manufacturer="SwitchBot",
            model_id="RelaySwitch2PM",
            name=f"{coordinator.device_name} Channel {channel}",
        )

    @property
    def is_on(self) -> bool | None:
        """Return true if device is on."""
        return self._device.is_on(self._channel)

    @exception_handler
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn device on."""
        _LOGGER.debug(
            "Turn Switchbot device on %s, channel %d", self._address, self._channel
        )
        await self._device.turn_on(self._channel)
        self.async_write_ha_state()

    @exception_handler
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn device off."""
        _LOGGER.debug(
            "Turn Switchbot device off %s, channel %d", self._address, self._channel
        )
        await self._device.turn_off(self._channel)
        self.async_write_ha_state()
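The setup path above fans a RelaySwitch2PM out into one entity per channel, each with a channel-suffixed unique ID. A tiny sketch of that ID scheme; the base_unique_id value is a placeholder:

def build_unique_ids(base_unique_id: str, channel_count: int) -> list[str]:
    # Channels are 1-based on the device, so the upper bound is inclusive.
    return [f"{base_unique_id}-{channel}" for channel in range(1, channel_count + 1)]

assert build_unique_ids("aa:bb", 2) == ["aa:bb-1", "aa:bb-2"]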
@@ -270,6 +270,12 @@ async def make_device_data(
            )
            devices_data.humidifiers.append((device, coordinator))
            devices_data.sensors.append((device, coordinator))
        if isinstance(device, Device) and device.device_type == "Climate Panel":
            coordinator = await coordinator_for_device(
                hass, entry, api, device, coordinators_by_id
            )
            devices_data.binary_sensors.append((device, coordinator))
            devices_data.sensors.append((device, coordinator))


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -104,6 +104,10 @@ BINARY_SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
    ),
    "Hub 3": (MOVE_DETECTED_DESCRIPTION,),
    "Water Detector": (LEAK_DESCRIPTION,),
    "Climate Panel": (
        IS_LIGHT_DESCRIPTION,
        MOVE_DETECTED_DESCRIPTION,
    ),
}


@@ -119,7 +119,6 @@ LIGHTLEVEL_DESCRIPTION = SensorEntityDescription(
    state_class=SensorStateClass.MEASUREMENT,
)


SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
    "Bot": (BATTERY_DESCRIPTION,),
    "Battery Circulator Fan": (BATTERY_DESCRIPTION,),
@@ -189,6 +188,11 @@ SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
    "Contact Sensor": (BATTERY_DESCRIPTION,),
    "Water Detector": (BATTERY_DESCRIPTION,),
    "Humidifier": (TEMPERATURE_DESCRIPTION,),
    "Climate Panel": (
        TEMPERATURE_DESCRIPTION,
        HUMIDITY_DESCRIPTION,
        BATTERY_DESCRIPTION,
    ),
}


@@ -226,7 +230,6 @@ class SwitchBotCloudSensor(SwitchBotCloudEntity, SensorEntity):
        """Set attributes from coordinator data."""
        if not self.coordinator.data:
            return

        if isinstance(
            self.entity_description,
            SwitchbotCloudSensorEntityDescription,
@@ -43,6 +43,7 @@ from .const import (
    ATTR_AUTHENTICATION,
    ATTR_CALLBACK_QUERY_ID,
    ATTR_CAPTION,
    ATTR_CHAT_ACTION,
    ATTR_CHAT_ID,
    ATTR_DISABLE_NOTIF,
    ATTR_DISABLE_WEB_PREV,
@@ -71,6 +72,17 @@ from .const import (
    ATTR_URL,
    ATTR_USERNAME,
    ATTR_VERIFY_SSL,
    CHAT_ACTION_CHOOSE_STICKER,
    CHAT_ACTION_FIND_LOCATION,
    CHAT_ACTION_RECORD_VIDEO,
    CHAT_ACTION_RECORD_VIDEO_NOTE,
    CHAT_ACTION_RECORD_VOICE,
    CHAT_ACTION_TYPING,
    CHAT_ACTION_UPLOAD_DOCUMENT,
    CHAT_ACTION_UPLOAD_PHOTO,
    CHAT_ACTION_UPLOAD_VIDEO,
    CHAT_ACTION_UPLOAD_VIDEO_NOTE,
    CHAT_ACTION_UPLOAD_VOICE,
    CONF_ALLOWED_CHAT_IDS,
    CONF_BOT_COUNT,
    CONF_CONFIG_ENTRY_ID,
@@ -89,6 +101,7 @@ from .const import (
    SERVICE_EDIT_REPLYMARKUP,
    SERVICE_LEAVE_CHAT,
    SERVICE_SEND_ANIMATION,
    SERVICE_SEND_CHAT_ACTION,
    SERVICE_SEND_DOCUMENT,
    SERVICE_SEND_LOCATION,
    SERVICE_SEND_MESSAGE,
@@ -153,6 +166,26 @@ SERVICE_SCHEMA_SEND_MESSAGE = BASE_SERVICE_SCHEMA.extend(
    {vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_TITLE): cv.string}
)

SERVICE_SCHEMA_SEND_CHAT_ACTION = BASE_SERVICE_SCHEMA.extend(
    {
        vol.Required(ATTR_CHAT_ACTION): vol.In(
            (
                CHAT_ACTION_TYPING,
                CHAT_ACTION_UPLOAD_PHOTO,
                CHAT_ACTION_RECORD_VIDEO,
                CHAT_ACTION_UPLOAD_VIDEO,
                CHAT_ACTION_RECORD_VOICE,
                CHAT_ACTION_UPLOAD_VOICE,
                CHAT_ACTION_UPLOAD_DOCUMENT,
                CHAT_ACTION_CHOOSE_STICKER,
                CHAT_ACTION_FIND_LOCATION,
                CHAT_ACTION_RECORD_VIDEO_NOTE,
                CHAT_ACTION_UPLOAD_VIDEO_NOTE,
            )
        ),
    }
)

SERVICE_SCHEMA_SEND_FILE = BASE_SERVICE_SCHEMA.extend(
    {
        vol.Optional(ATTR_URL): cv.string,
@@ -268,6 +301,7 @@ SERVICE_SCHEMA_SET_MESSAGE_REACTION = vol.Schema(

SERVICE_MAP = {
    SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE,
    SERVICE_SEND_CHAT_ACTION: SERVICE_SCHEMA_SEND_CHAT_ACTION,
    SERVICE_SEND_PHOTO: SERVICE_SCHEMA_SEND_FILE,
    SERVICE_SEND_STICKER: SERVICE_SCHEMA_SEND_STICKER,
    SERVICE_SEND_ANIMATION: SERVICE_SCHEMA_SEND_FILE,
@@ -367,6 +401,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            messages = await notify_service.send_message(
                context=service.context, **kwargs
            )
        elif msgtype == SERVICE_SEND_CHAT_ACTION:
            messages = await notify_service.send_chat_action(
                context=service.context, **kwargs
            )
        elif msgtype in [
            SERVICE_SEND_PHOTO,
            SERVICE_SEND_ANIMATION,
@@ -433,6 +471,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

        if service_notif in [
            SERVICE_SEND_MESSAGE,
            SERVICE_SEND_CHAT_ACTION,
            SERVICE_SEND_PHOTO,
            SERVICE_SEND_ANIMATION,
            SERVICE_SEND_VIDEO,
@@ -617,6 +617,28 @@ class TelegramNotificationService:
            context=context,
        )

    async def send_chat_action(
        self,
        chat_action: str = "",
        target: Any = None,
        context: Context | None = None,
        **kwargs: Any,
    ) -> dict[int, int]:
        """Send a chat action to pre-allowed chat IDs."""
        result = {}
        for chat_id in self.get_target_chat_ids(target):
            _LOGGER.debug("Send action %s in chat ID %s", chat_action, chat_id)
            is_successful = await self._send_msg(
                self.bot.send_chat_action,
                "Error sending action",
                None,
                chat_id=chat_id,
                action=chat_action,
                context=context,
            )
            result[chat_id] = is_successful
        return result

    async def send_file(
        self,
        file_type: str,
@@ -32,6 +32,7 @@ ISSUE_DEPRECATED_YAML_IMPORT_ISSUE_ERROR = "deprecated_yaml_import_issue_error"

DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4.0/22")]

SERVICE_SEND_CHAT_ACTION = "send_chat_action"
SERVICE_SEND_MESSAGE = "send_message"
SERVICE_SEND_PHOTO = "send_photo"
SERVICE_SEND_STICKER = "send_sticker"
@@ -59,10 +60,23 @@ PARSER_MD = "markdown"
PARSER_MD2 = "markdownv2"
PARSER_PLAIN_TEXT = "plain_text"

ATTR_CHAT_ACTION = "chat_action"
ATTR_DATA = "data"
ATTR_MESSAGE = "message"
ATTR_TITLE = "title"

CHAT_ACTION_TYPING = "typing"
CHAT_ACTION_UPLOAD_PHOTO = "upload_photo"
CHAT_ACTION_RECORD_VIDEO = "record_video"
CHAT_ACTION_UPLOAD_VIDEO = "upload_video"
CHAT_ACTION_RECORD_VOICE = "record_voice"
CHAT_ACTION_UPLOAD_VOICE = "upload_voice"
CHAT_ACTION_UPLOAD_DOCUMENT = "upload_document"
CHAT_ACTION_CHOOSE_STICKER = "choose_sticker"
CHAT_ACTION_FIND_LOCATION = "find_location"
CHAT_ACTION_RECORD_VIDEO_NOTE = "record_video_note"
CHAT_ACTION_UPLOAD_VIDEO_NOTE = "upload_video_note"

ATTR_ARGS = "args"
ATTR_AUTHENTICATION = "authentication"
ATTR_CALLBACK_QUERY = "callback_query"
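With the constants and schema above registered, send_chat_action can be invoked like any other Home Assistant service. A hedged sketch of a call from Python; the domain, service name, and fields come from the diff above, while the chat ID is a placeholder:

from homeassistant.core import HomeAssistant

async def show_typing(hass: HomeAssistant, chat_id: int) -> None:
    # "typing" is CHAT_ACTION_TYPING above; target is a pre-authorized chat ID.
    await hass.services.async_call(
        "telegram_bot",
        "send_chat_action",
        {"chat_action": "typing", "target": chat_id},
        blocking=True,
    )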
@@ -3,6 +3,9 @@
    "send_message": {
      "service": "mdi:send"
    },
    "send_chat_action": {
      "service": "mdi:send"
    },
    "send_photo": {
      "service": "mdi:camera"
    },
@@ -66,6 +66,38 @@ send_message:
      number:
        mode: box

send_chat_action:
  fields:
    config_entry_id:
      selector:
        config_entry:
          integration: telegram_bot
    chat_action:
      selector:
        select:
          options:
            - "typing"
            - "upload_photo"
            - "record_video"
            - "upload_video"
            - "record_voice"
            - "upload_voice"
            - "upload_document"
            - "choose_sticker"
            - "find_location"
            - "record_video_note"
            - "upload_video_note"
          translation_key: "chat_action"
    target:
      example: "[12345, 67890] or 12345"
      selector:
        text:
          multiple: true
    message_thread_id:
      selector:
        number:
          mode: box

send_photo:
  fields:
    config_entry_id:
@@ -138,6 +138,21 @@
        "digest": "Digest",
        "bearer_token": "Bearer token"
      }
    },
    "chat_action": {
      "options": {
        "typing": "Typing",
        "upload_photo": "Uploading photo",
        "record_video": "Recording video",
        "upload_video": "Uploading video",
        "record_voice": "Recording voice",
        "upload_voice": "Uploading voice",
        "upload_document": "Uploading document",
        "choose_sticker": "Choosing sticker",
        "find_location": "Finding location",
        "record_video_note": "Recording video note",
        "upload_video_note": "Uploading video note"
      }
    }
  },
  "services": {
@@ -199,6 +214,28 @@
        }
      }
    },
    "send_chat_action": {
      "name": "Send chat action",
      "description": "Sends a chat action.",
      "fields": {
        "config_entry_id": {
          "name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
          "description": "The config entry representing the Telegram bot to send the chat action."
        },
        "chat_action": {
          "name": "Chat action",
          "description": "Chat action to be sent."
        },
        "target": {
          "name": "Target",
          "description": "An array of pre-authorized chat IDs to send the chat action to. If not present, the first allowed chat ID is the default."
        },
        "message_thread_id": {
          "name": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::name%]",
          "description": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::description%]"
        }
      }
    },
    "send_photo": {
      "name": "Send photo",
      "description": "Sends a photo.",
@@ -124,6 +124,7 @@ class DPCode(StrEnum):
    BASIC_WDR = "basic_wdr"
    BATTERY = "battery"  # Used by non-standard contact sensor implementations
    BATTERY_PERCENTAGE = "battery_percentage"  # Battery percentage
    BATTERY_POWER = "battery_power"
    BATTERY_STATE = "battery_state"  # Battery state
    BATTERY_VALUE = "battery_value"  # Battery value
    BRIGHT_CONTROLLER = "bright_controller"
@@ -184,11 +185,17 @@ class DPCode(StrEnum):
    COUNTDOWN_LEFT = "countdown_left"
    COUNTDOWN_SET = "countdown_set"  # Countdown setting
    CRY_DETECTION_SWITCH = "cry_detection_switch"
    CUML_E_EXPORT_OFFGRID1 = "cuml_e_export_offgrid1"
    CUMULATIVE_ENERGY_CHARGED = "cumulative_energy_charged"
    CUMULATIVE_ENERGY_DISCHARGED = "cumulative_energy_discharged"
    CUMULATIVE_ENERGY_GENERATED_PV = "cumulative_energy_generated_pv"
    CUMULATIVE_ENERGY_OUTPUT_INV = "cumulative_energy_output_inv"
    CUP_NUMBER = "cup_number"  # Number of cups
    CUR_CURRENT = "cur_current"  # Actual current
    CUR_NEUTRAL = "cur_neutral"  # Total reverse energy
    CUR_POWER = "cur_power"  # Actual power
    CUR_VOLTAGE = "cur_voltage"  # Actual voltage
    CURRENT_SOC = "current_soc"
    DECIBEL_SENSITIVITY = "decibel_sensitivity"
    DECIBEL_SWITCH = "decibel_switch"
    DEHUMIDITY_SET_ENUM = "dehumidify_set_enum"
@@ -240,6 +247,7 @@ class DPCode(StrEnum):
    HUMIDITY_SET = "humidity_set"  # Humidity setting
    HUMIDITY_VALUE = "humidity_value"  # Humidity
    INSTALLATION_HEIGHT = "installation_height"
    INVERTER_OUTPUT_POWER = "inverter_output_power"
    IPC_WORK_MODE = "ipc_work_mode"
    LED_TYPE_1 = "led_type_1"
    LED_TYPE_2 = "led_type_2"
@@ -305,6 +313,9 @@ class DPCode(StrEnum):
    PUMP = "pump"
    PUMP_RESET = "pump_reset"  # Water pump reset
    PUMP_TIME = "pump_time"  # Water pump duration
    PV_POWER_CHANNEL_1 = "pv_power_channel_1"
    PV_POWER_CHANNEL_2 = "pv_power_channel_2"
    PV_POWER_TOTAL = "pv_power_total"
    RAIN_24H = "rain_24h"  # Total daily rainfall in mm
    RAIN_RATE = "rain_rate"  # Rain intensity in mm/h
    RECORD_MODE = "record_mode"
@@ -1413,6 +1413,79 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
    # Wireless Switch
    # https://developer.tuya.com/en/docs/iot/s?id=Kbeoa9fkv6brp
    "wxkg": BATTERY_SENSORS,  # Pressure Sensor
    # Micro Storage Inverter
    # Energy storage and solar PV inverter system with monitoring capabilities
    "xnyjcn": (
        TuyaSensorEntityDescription(
            key=DPCode.CURRENT_SOC,
            translation_key="battery_soc",
            device_class=SensorDeviceClass.BATTERY,
            state_class=SensorStateClass.MEASUREMENT,
            entity_category=EntityCategory.DIAGNOSTIC,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.PV_POWER_TOTAL,
            translation_key="total_pv_power",
            device_class=SensorDeviceClass.POWER,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.PV_POWER_CHANNEL_1,
            translation_key="pv_channel_power",
            translation_placeholders={"index": "1"},
            device_class=SensorDeviceClass.POWER,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.PV_POWER_CHANNEL_2,
            translation_key="pv_channel_power",
            translation_placeholders={"index": "2"},
            device_class=SensorDeviceClass.POWER,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.BATTERY_POWER,
            translation_key="battery_power",
            device_class=SensorDeviceClass.POWER,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.INVERTER_OUTPUT_POWER,
            translation_key="inverter_output_power",
            device_class=SensorDeviceClass.POWER,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.CUMULATIVE_ENERGY_GENERATED_PV,
            translation_key="lifetime_pv_energy",
            device_class=SensorDeviceClass.ENERGY,
            state_class=SensorStateClass.TOTAL_INCREASING,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.CUMULATIVE_ENERGY_OUTPUT_INV,
            translation_key="lifetime_inverter_output_energy",
            device_class=SensorDeviceClass.ENERGY,
            state_class=SensorStateClass.TOTAL_INCREASING,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.CUMULATIVE_ENERGY_DISCHARGED,
            translation_key="lifetime_battery_discharge_energy",
            device_class=SensorDeviceClass.ENERGY,
            state_class=SensorStateClass.TOTAL_INCREASING,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.CUMULATIVE_ENERGY_CHARGED,
            translation_key="lifetime_battery_charge_energy",
            device_class=SensorDeviceClass.ENERGY,
            state_class=SensorStateClass.TOTAL_INCREASING,
        ),
        TuyaSensorEntityDescription(
            key=DPCode.CUML_E_EXPORT_OFFGRID1,
            translation_key="lifetime_offgrid_port_energy",
            device_class=SensorDeviceClass.ENERGY,
            state_class=SensorStateClass.TOTAL_INCREASING,
        ),
    ),
    # https://developer.tuya.com/en/docs/iot/categoryylcg?id=Kaiuz3kc2e4gm
    "ylcg": (
        TuyaSensorEntityDescription(
@@ -621,6 +621,36 @@
          "battery_state": {
            "name": "Battery state"
          },
          "battery_soc": {
            "name": "Battery SOC"
          },
          "battery_power": {
            "name": "Battery power"
          },
          "total_pv_power": {
            "name": "Total PV power"
          },
          "pv_channel_power": {
            "name": "PV channel {index} power"
          },
          "inverter_output_power": {
            "name": "Inverter output power"
          },
          "lifetime_pv_energy": {
            "name": "Lifetime PV energy"
          },
          "lifetime_inverter_output_energy": {
            "name": "Lifetime inverter output energy"
          },
          "lifetime_battery_discharge_energy": {
            "name": "Lifetime battery discharge energy"
          },
          "lifetime_battery_charge_energy": {
            "name": "Lifetime battery charge energy"
          },
          "lifetime_offgrid_port_energy": {
            "name": "Lifetime off-grid port energy"
          },
          "gas": {
            "name": "Gas"
          },
@@ -7,7 +7,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["aiounifi"],
  "requirements": ["aiounifi==86"],
  "requirements": ["aiounifi==87"],
  "ssdp": [
    {
      "manufacturer": "Ubiquiti Networks",
89 homeassistant/components/usage_prediction/__init__.py Normal file
@@ -0,0 +1,89 @@
"""The usage prediction integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import common_control
|
||||
from .const import DATA_CACHE, DOMAIN
|
||||
from .models import EntityUsageDataCache, EntityUsagePredictions
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
CACHE_DURATION = timedelta(hours=24)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the usage prediction integration."""
|
||||
websocket_api.async_register_command(hass, ws_common_control)
|
||||
hass.data[DATA_CACHE] = {}
|
||||
return True
|
||||
|
||||
|
||||
@websocket_api.websocket_command({"type": f"{DOMAIN}/common_control"})
|
||||
@websocket_api.async_response
|
||||
async def ws_common_control(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Handle usage prediction common control WebSocket API."""
|
||||
result = await get_cached_common_control(hass, connection.user.id)
|
||||
time_category = common_control.time_category(dt_util.now().hour)
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"entities": getattr(result, time_category),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def get_cached_common_control(
|
||||
hass: HomeAssistant, user_id: str
|
||||
) -> EntityUsagePredictions:
|
||||
"""Get cached common control predictions or fetch new ones.
|
||||
|
||||
Returns cached data if it's less than 24 hours old,
|
||||
otherwise fetches new data and caches it.
|
||||
"""
|
||||
# Create a unique storage key for this user
|
||||
storage_key = user_id
|
||||
|
||||
cached_data = hass.data[DATA_CACHE].get(storage_key)
|
||||
|
||||
if isinstance(cached_data, asyncio.Task):
|
||||
# If there's an ongoing task to fetch data, await its result
|
||||
return await cached_data
|
||||
|
||||
# Check if cache is valid (less than 24 hours old)
|
||||
if cached_data is not None:
|
||||
if (dt_util.utcnow() - cached_data.timestamp) < CACHE_DURATION:
|
||||
# Cache is still valid, return the cached predictions
|
||||
return cached_data.predictions
|
||||
|
||||
# Create task fetching data
|
||||
task = hass.async_create_task(
|
||||
common_control.async_predict_common_control(hass, user_id)
|
||||
)
|
||||
hass.data[DATA_CACHE][storage_key] = task
|
||||
|
||||
try:
|
||||
predictions = await task
|
||||
except Exception:
|
||||
# If the task fails, remove it from cache to allow retries
|
||||
hass.data[DATA_CACHE].pop(storage_key)
|
||||
raise
|
||||
|
||||
hass.data[DATA_CACHE][storage_key] = EntityUsageDataCache(
|
||||
predictions=predictions,
|
||||
)
|
||||
|
||||
return predictions
|
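The cache above stores either a finished EntityUsageDataCache or the in-flight asyncio.Task itself, so concurrent callers share one computation instead of racing. The same single-flight idea reduced to the standard library; the names here are generic, not the integration's:

import asyncio

_cache: dict[str, asyncio.Task[int] | int] = {}

async def _expensive(key: str) -> int:
    await asyncio.sleep(0.1)  # stand-in for a slow database query
    return len(key)

async def get(key: str) -> int:
    cached = _cache.get(key)
    if isinstance(cached, asyncio.Task):
        return await cached          # join the in-flight computation
    if cached is not None:
        return cached                # fresh enough, reuse
    task = asyncio.ensure_future(_expensive(key))
    _cache[key] = task               # publish the task before awaiting it
    try:
        result = await task
    except Exception:
        _cache.pop(key, None)        # drop failed task so retries can happen
        raise
    _cache[key] = result
    return result

async def main() -> None:
    # Two concurrent callers, one _expensive() run.
    results = await asyncio.gather(get("abc"), get("abc"))
    assert results == [3, 3]

asyncio.run(main())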
241 homeassistant/components/usage_prediction/common_control.py Normal file
@@ -0,0 +1,241 @@
"""Code to generate common control usage patterns."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import Counter
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timedelta
|
||||
from functools import cache
|
||||
import logging
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.db_schema import EventData, Events, EventTypes
|
||||
from homeassistant.components.recorder.models import uuid_hex_to_bytes_or_none
|
||||
from homeassistant.components.recorder.util import session_scope
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.json import json_loads_object
|
||||
|
||||
from .models import EntityUsagePredictions
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Time categories for usage patterns
|
||||
TIME_CATEGORIES = ["morning", "afternoon", "evening", "night"]
|
||||
|
||||
RESULTS_TO_INCLUDE = 8
|
||||
|
||||
# List of domains for which we want to track usage
|
||||
ALLOWED_DOMAINS = {
|
||||
# Entity platforms
|
||||
Platform.AIR_QUALITY,
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CALENDAR,
|
||||
Platform.CAMERA,
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DATE,
|
||||
Platform.DATETIME,
|
||||
Platform.FAN,
|
||||
Platform.HUMIDIFIER,
|
||||
Platform.IMAGE,
|
||||
Platform.LAWN_MOWER,
|
||||
Platform.LIGHT,
|
||||
Platform.LOCK,
|
||||
Platform.MEDIA_PLAYER,
|
||||
Platform.NUMBER,
|
||||
Platform.SCENE,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SIREN,
|
||||
Platform.SWITCH,
|
||||
Platform.TEXT,
|
||||
Platform.TIME,
|
||||
Platform.TODO,
|
||||
Platform.UPDATE,
|
||||
Platform.VACUUM,
|
||||
Platform.VALVE,
|
||||
Platform.WAKE_WORD,
|
||||
Platform.WATER_HEATER,
|
||||
Platform.WEATHER,
|
||||
# Helpers with own domain
|
||||
"counter",
|
||||
"group",
|
||||
"input_boolean",
|
||||
"input_button",
|
||||
"input_datetime",
|
||||
"input_number",
|
||||
"input_select",
|
||||
"input_text",
|
||||
"schedule",
|
||||
"timer",
|
||||
}
|
||||
|
||||
|
||||
@cache
|
||||
def time_category(hour: int) -> Literal["morning", "afternoon", "evening", "night"]:
|
||||
"""Determine the time category for a given hour."""
|
||||
if 6 <= hour < 12:
|
||||
return "morning"
|
||||
if 12 <= hour < 18:
|
||||
return "afternoon"
|
||||
if 18 <= hour < 22:
|
||||
return "evening"
|
||||
return "night"
|
||||
|
||||
|
||||
async def async_predict_common_control(
|
||||
hass: HomeAssistant, user_id: str
|
||||
) -> EntityUsagePredictions:
|
||||
"""Generate a list of commonly used entities for a user.
|
||||
|
||||
Args:
|
||||
hass: Home Assistant instance
|
||||
user_id: User ID to filter events by.
|
||||
|
||||
Returns:
|
||||
Dictionary with time categories as keys and lists of most common entity IDs as values
|
||||
"""
|
||||
# Get the recorder instance to ensure it's ready
|
||||
recorder = get_instance(hass)
|
||||
|
||||
# Execute the database operation in the recorder's executor
|
||||
return await recorder.async_add_executor_job(
|
||||
_fetch_with_session, hass, _fetch_and_process_data, user_id
|
||||
)
|
||||
|
||||
|
||||
def _fetch_and_process_data(session: Session, user_id: str) -> EntityUsagePredictions:
|
||||
"""Fetch and process service call events from the database."""
|
||||
# Prepare a dictionary to track results
|
||||
results: dict[str, Counter[str]] = {
|
||||
time_cat: Counter() for time_cat in TIME_CATEGORIES
|
||||
}
|
||||
|
||||
# Keep track of contexts that we processed so that we will only process
|
||||
# the first service call in a context, and not subsequent calls.
|
||||
context_processed: set[bytes] = set()
|
||||
thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
|
||||
user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
|
||||
if not user_id_bytes:
|
||||
raise ValueError("Invalid user_id format")
|
||||
|
||||
# Build the main query for events with their data
|
||||
query = (
|
||||
select(
|
||||
Events.context_id_bin,
|
||||
Events.time_fired_ts,
|
||||
EventData.shared_data,
|
||||
)
|
||||
.select_from(Events)
|
||||
.outerjoin(EventData, Events.data_id == EventData.data_id)
|
||||
.outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
|
||||
.where(Events.time_fired_ts >= thirty_days_ago_ts)
|
||||
.where(Events.context_user_id_bin == user_id_bytes)
|
||||
.where(EventTypes.event_type == "call_service")
|
||||
.order_by(Events.time_fired_ts)
|
||||
)
|
||||
|
||||
# Execute the query
|
||||
context_id: bytes
|
||||
time_fired_ts: float
|
||||
shared_data: str | None
|
||||
local_time_zone = dt_util.get_default_time_zone()
|
||||
for context_id, time_fired_ts, shared_data in (
|
||||
session.connection().execute(query).all()
|
||||
):
|
||||
# Skip if we have already processed an event that was part of this context
|
||||
if context_id in context_processed:
|
||||
continue
|
||||
|
||||
# Mark this context as processed
|
||||
context_processed.add(context_id)
|
||||
|
||||
# Parse the event data
|
||||
if not shared_data:
|
||||
continue
|
||||
|
||||
try:
|
||||
event_data = json_loads_object(shared_data)
|
||||
except (ValueError, TypeError) as err:
|
||||
_LOGGER.debug("Failed to parse event data: %s", err)
|
||||
continue
|
||||
|
||||
# Empty event data, skipping
|
||||
if not event_data:
|
||||
continue
|
||||
|
||||
service_data = cast(dict[str, Any] | None, event_data.get("service_data"))
|
||||
|
||||
# No service data found, skipping
|
||||
if not service_data:
|
||||
continue
|
||||
|
||||
entity_ids: str | list[str] | None
|
||||
if (target := service_data.get("target")) and (
|
||||
target_entity_ids := target.get("entity_id")
|
||||
):
|
||||
entity_ids = target_entity_ids
|
||||
else:
|
||||
entity_ids = service_data.get("entity_id")
|
||||
|
||||
# No entity IDs found, skip this event
|
||||
if entity_ids is None:
|
||||
continue
|
||||
|
||||
if not isinstance(entity_ids, list):
|
||||
entity_ids = [entity_ids]
|
||||
|
||||
# Filter out entity IDs that are not in allowed domains
|
||||
entity_ids = [
|
||||
entity_id
|
||||
for entity_id in entity_ids
|
||||
if entity_id.split(".")[0] in ALLOWED_DOMAINS
|
||||
]
|
||||
|
||||
if not entity_ids:
|
||||
continue
|
||||
|
||||
# Convert timestamp to datetime and determine time category
|
||||
if time_fired_ts:
|
||||
# Convert to local time for time category determination
|
||||
period = time_category(
|
||||
datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
|
||||
)
|
||||
|
||||
# Count entity usage
|
||||
for entity_id in entity_ids:
|
||||
results[period][entity_id] += 1
|
||||
|
||||
return EntityUsagePredictions(
|
||||
morning=[
|
||||
ent_id for (ent_id, _) in results["morning"].most_common(RESULTS_TO_INCLUDE)
|
||||
],
|
||||
afternoon=[
|
||||
ent_id
|
||||
for (ent_id, _) in results["afternoon"].most_common(RESULTS_TO_INCLUDE)
|
||||
],
|
||||
evening=[
|
||||
ent_id for (ent_id, _) in results["evening"].most_common(RESULTS_TO_INCLUDE)
|
||||
],
|
||||
night=[
|
||||
ent_id for (ent_id, _) in results["night"].most_common(RESULTS_TO_INCLUDE)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def _fetch_with_session(
|
||||
hass: HomeAssistant,
|
||||
fetch_func: Callable[[Session], EntityUsagePredictions],
|
||||
*args: object,
|
||||
) -> EntityUsagePredictions:
|
||||
"""Execute a fetch function with a database session."""
|
||||
with session_scope(hass=hass, read_only=True) as session:
|
||||
return fetch_func(session, *args)
|
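The heavy lifting above is one Counter per time bucket plus most_common() at the end. A self-contained run of the same aggregation; the sample events are invented:

from collections import Counter

def time_category(hour: int) -> str:
    if 6 <= hour < 12:
        return "morning"
    if 12 <= hour < 18:
        return "afternoon"
    if 18 <= hour < 22:
        return "evening"
    return "night"

# (local hour, entity acted on) pairs, as recovered from call_service events.
events = [(7, "light.kitchen"), (8, "light.kitchen"), (7, "switch.coffee")]

results: dict[str, Counter[str]] = {
    cat: Counter() for cat in ("morning", "afternoon", "evening", "night")
}
for hour, entity_id in events:
    results[time_category(hour)][entity_id] += 1

# Most-used morning entities first, mirroring the RESULTS_TO_INCLUDE slicing.
assert [e for e, _ in results["morning"].most_common(8)] == [
    "light.kitchen",
    "switch.coffee",
]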
13 homeassistant/components/usage_prediction/const.py Normal file
@@ -0,0 +1,13 @@
"""Constants for the usage prediction integration."""
|
||||
|
||||
import asyncio
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .models import EntityUsageDataCache, EntityUsagePredictions
|
||||
|
||||
DOMAIN = "usage_prediction"
|
||||
|
||||
DATA_CACHE: HassKey[
|
||||
dict[str, asyncio.Task[EntityUsagePredictions] | EntityUsageDataCache]
|
||||
] = HassKey("usage_prediction")
|
10 homeassistant/components/usage_prediction/manifest.json Normal file
@@ -0,0 +1,10 @@
{
  "domain": "usage_prediction",
  "name": "Usage Prediction",
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["http", "recorder"],
  "documentation": "https://www.home-assistant.io/integrations/usage_prediction",
  "integration_type": "system",
  "iot_class": "calculated",
  "quality_scale": "internal"
}
24 homeassistant/components/usage_prediction/models.py Normal file
@@ -0,0 +1,24 @@
"""Models for the usage prediction integration."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
|
||||
@dataclass
|
||||
class EntityUsagePredictions:
|
||||
"""Prediction which entities are likely to be used in each time category."""
|
||||
|
||||
morning: list[str] = field(default_factory=list)
|
||||
afternoon: list[str] = field(default_factory=list)
|
||||
evening: list[str] = field(default_factory=list)
|
||||
night: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class EntityUsageDataCache:
|
||||
"""Data model for entity usage prediction."""
|
||||
|
||||
predictions: EntityUsagePredictions
|
||||
timestamp: datetime = field(default_factory=dt_util.utcnow)
|
3 homeassistant/components/usage_prediction/strings.json Normal file
@@ -0,0 +1,3 @@
{
  "title": "Usage Prediction"
}
@@ -17,6 +17,7 @@ from homeassistant.helpers.start import async_at_started
from .const import DOMAIN, LOGGER, format_dispatch_call

PLATFORMS = [
    Platform.EVENT,
    Platform.SENSOR,
]

104 homeassistant/components/weatherflow/event.py Normal file
@@ -0,0 +1,104 @@
"""Event entities for the WeatherFlow integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyweatherflowudp.device import EVENT_RAIN_START, EVENT_STRIKE, WeatherFlowDevice
|
||||
|
||||
from homeassistant.components.event import EventEntity, EventEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, LOGGER, format_dispatch_call
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class WeatherFlowEventEntityDescription(EventEntityDescription):
|
||||
"""Describes a WeatherFlow event entity."""
|
||||
|
||||
wf_event: str
|
||||
event_types: list[str]
|
||||
|
||||
|
||||
EVENT_DESCRIPTIONS: list[WeatherFlowEventEntityDescription] = [
|
||||
WeatherFlowEventEntityDescription(
|
||||
key="precip_start_event",
|
||||
translation_key="precip_start_event",
|
||||
event_types=["precipitation_start"],
|
||||
wf_event=EVENT_RAIN_START,
|
||||
),
|
||||
WeatherFlowEventEntityDescription(
|
||||
key="lightning_strike_event",
|
||||
translation_key="lightning_strike_event",
|
||||
event_types=["lightning_strike"],
|
||||
wf_event=EVENT_STRIKE,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up WeatherFlow event entities using config entry."""
|
||||
|
||||
@callback
|
||||
def async_add_events(device: WeatherFlowDevice) -> None:
|
||||
LOGGER.debug("Adding events for %s", device)
|
||||
async_add_entities(
|
||||
WeatherFlowEventEntity(device, description)
|
||||
for description in EVENT_DESCRIPTIONS
|
||||
)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
format_dispatch_call(config_entry),
|
||||
async_add_events,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class WeatherFlowEventEntity(EventEntity):
|
||||
"""Generic WeatherFlow event entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: WeatherFlowEventEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: WeatherFlowDevice,
|
||||
description: WeatherFlowEventEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the WeatherFlow event entity."""
|
||||
|
||||
self.device = device
|
||||
self.entity_description = description
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.serial_number)},
|
||||
manufacturer="WeatherFlow",
|
||||
model=device.model,
|
||||
name=device.serial_number,
|
||||
sw_version=device.firmware_revision,
|
||||
)
|
||||
self._attr_unique_id = f"{device.serial_number}_{description.key}"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to the configured WeatherFlow device event."""
|
||||
self.async_on_remove(
|
||||
self.device.on(self.entity_description.wf_event, self._handle_event)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_event(self, event) -> None:
|
||||
self._trigger_event(
|
||||
self.entity_description.event_types[0],
|
||||
{},
|
||||
)
|
||||
self.async_write_ha_state()
|
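async_added_to_hass above relies on device.on() returning an unsubscribe callable that async_on_remove can invoke later. A minimal emitter sketch of that contract, assuming (not verified against pyweatherflowudp) that on() returns such a callable:

from collections.abc import Callable

class FakeDevice:
    """Minimal emitter mimicking an on(event, callback) subscription API."""

    def __init__(self) -> None:
        self._listeners: dict[str, list[Callable[[object], None]]] = {}

    def on(self, event: str, cb: Callable[[object], None]) -> Callable[[], None]:
        self._listeners.setdefault(event, []).append(cb)
        return lambda: self._listeners[event].remove(cb)

    def emit(self, event: str) -> None:
        for cb in list(self._listeners.get(event, [])):
            cb(object())

seen: list[str] = []
device = FakeDevice()
unsubscribe = device.on("strike", lambda _evt: seen.append("strike"))
device.emit("strike")
unsubscribe()       # mirrors the cleanup registered via async_on_remove()
device.emit("strike")
assert seen == ["strike"]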
@@ -38,6 +38,14 @@
          "337.5": "mdi:arrow-up"
        }
      }
    },
    "event": {
      "lightning_strike_event": {
        "default": "mdi:weather-lightning"
      },
      "precip_start_event": {
        "default": "mdi:weather-rainy"
      }
    }
  }
}
@@ -79,6 +79,14 @@
      "wind_lull": {
        "name": "Wind lull"
      }
    },
    "event": {
      "lightning_strike_event": {
        "name": "Lightning strike"
      },
      "precip_start_event": {
        "name": "Precipitation start"
      }
    }
  }
}
104 homeassistant/components/workday/calendar.py Normal file
@@ -0,0 +1,104 @@
"""Workday Calendar."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from holidays import HolidayBase
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import WorkdayConfigEntry
|
||||
from .const import CONF_EXCLUDES, CONF_OFFSET, CONF_WORKDAYS
|
||||
from .entity import BaseWorkdayEntity
|
||||
|
||||
CALENDAR_DAYS_AHEAD = 365
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: WorkdayConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Holiday Calendar config entry."""
|
||||
days_offset: int = int(entry.options[CONF_OFFSET])
|
||||
excludes: list[str] = entry.options[CONF_EXCLUDES]
|
||||
sensor_name: str = entry.options[CONF_NAME]
|
||||
workdays: list[str] = entry.options[CONF_WORKDAYS]
|
||||
obj_holidays = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
WorkdayCalendarEntity(
|
||||
obj_holidays,
|
||||
workdays,
|
||||
excludes,
|
||||
days_offset,
|
||||
sensor_name,
|
||||
entry.entry_id,
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class WorkdayCalendarEntity(BaseWorkdayEntity, CalendarEntity):
|
||||
"""Representation of a Workday Calendar."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
obj_holidays: HolidayBase,
|
||||
workdays: list[str],
|
||||
excludes: list[str],
|
||||
days_offset: int,
|
||||
name: str,
|
||||
entry_id: str,
|
||||
) -> None:
|
||||
"""Initialize WorkdayCalendarEntity."""
|
||||
super().__init__(
|
||||
obj_holidays,
|
||||
workdays,
|
||||
excludes,
|
||||
days_offset,
|
||||
name,
|
||||
entry_id,
|
||||
)
|
||||
self._attr_unique_id = entry_id
|
||||
self._attr_event = None
|
||||
self.event_list: list[CalendarEvent] = []
|
||||
self._name = name
|
||||
|
||||
def update_data(self, now: datetime) -> None:
|
||||
"""Update data."""
|
||||
event_list = []
|
||||
for i in range(CALENDAR_DAYS_AHEAD):
|
||||
future_date = now.date() + timedelta(days=i)
|
||||
if self.date_is_workday(future_date):
|
||||
event = CalendarEvent(
|
||||
summary=self._name,
|
||||
start=future_date,
|
||||
end=future_date,
|
||||
)
|
||||
event_list.append(event)
|
||||
self.event_list = event_list
|
||||
|
||||
@property
|
||||
def event(self) -> CalendarEvent | None:
|
||||
"""Return the next upcoming event."""
|
||||
return (
|
||||
sorted(self.event_list, key=lambda e: e.start)[0]
|
||||
if self.event_list
|
||||
else None
|
||||
)
|
||||
|
||||
async def async_get_events(
|
||||
self, hass: HomeAssistant, start_date: datetime, end_date: datetime
|
||||
) -> list[CalendarEvent]:
|
||||
"""Get all events in a specific time frame."""
|
||||
return [
|
||||
workday
|
||||
for workday in self.event_list
|
||||
if start_date.date() <= workday.start <= end_date.date()
|
||||
]
|
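async_get_events above keeps the precomputed workday events whose start date falls inside the inclusive query window. The same comparison on plain dates, with invented sample data:

from dataclasses import dataclass
from datetime import date

@dataclass
class Event:
    start: date
    end: date

events = [Event(date(2025, 1, d), date(2025, 1, d)) for d in (1, 2, 10)]
window_start, window_end = date(2025, 1, 1), date(2025, 1, 5)

# Same inclusive bounds as the list comprehension in async_get_events.
in_window = [e for e in events if window_start <= e.start <= window_end]
assert [e.start.day for e in in_window] == [1, 2]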
@@ -11,7 +11,7 @@ LOGGER = logging.getLogger(__package__)
ALLOWED_DAYS = [*WEEKDAYS, "holiday"]

DOMAIN = "workday"
PLATFORMS = [Platform.BINARY_SENSOR]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CALENDAR]

CONF_PROVINCE = "province"
CONF_WORKDAYS = "workdays"
@@ -212,6 +212,11 @@
          }
        }
      }
    },
    "calendar": {
      "workday": {
        "name": "[%key:component::calendar::title%]"
      }
    }
  },
  "services": {
@@ -8,7 +8,6 @@ from homeassistant.const import (
    ATTR_GPS_ACCURACY,
    ATTR_LATITUDE,
    ATTR_LONGITUDE,
    CONF_CONDITION,
    CONF_ENTITY_ID,
    CONF_ZONE,
    STATE_UNAVAILABLE,
@@ -29,7 +28,6 @@ from . import in_zone
_CONDITION_SCHEMA = vol.Schema(
    {
        **cv.CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "zone",
        vol.Required(CONF_ENTITY_ID): cv.entity_ids,
        vol.Required("zone"): cv.entity_ids,
        # To support use_trigger_value in automation
@@ -20,7 +20,12 @@ from homeassistant.const import (
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
TriggerActionType,
|
||||
TriggerData,
|
||||
TriggerInfo,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from ..const import (
|
||||
@@ -136,131 +141,18 @@ async def async_validate_trigger_config(
|
||||
return config
|
||||
|
||||
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
action: TriggerActionType,
|
||||
trigger_info: TriggerInfo,
|
||||
*,
|
||||
platform_type: str = PLATFORM_TYPE,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Listen for state changes based on configuration."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
if config[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
|
||||
hass, config, dev_reg=dev_reg
|
||||
):
|
||||
raise ValueError(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
|
||||
event_source = config[ATTR_EVENT_SOURCE]
|
||||
event_name = config[ATTR_EVENT]
|
||||
event_data_filter = config.get(ATTR_EVENT_DATA, {})
|
||||
|
||||
unsubs: list[Callable] = []
|
||||
job = HassJob(action)
|
||||
|
||||
trigger_data = trigger_info["trigger_data"]
|
||||
|
||||
@callback
|
||||
def async_on_event(event_data: dict, device: dr.DeviceEntry | None = None) -> None:
|
||||
"""Handle event."""
|
||||
for key, val in event_data_filter.items():
|
||||
if key not in event_data:
|
||||
return
|
||||
if (
|
||||
config[ATTR_PARTIAL_DICT_MATCH]
|
||||
and isinstance(event_data[key], dict)
|
||||
and isinstance(val, dict)
|
||||
):
|
||||
for key2, val2 in val.items():
|
||||
if key2 not in event_data[key] or event_data[key][key2] != val2:
|
||||
return
|
||||
continue
|
||||
if event_data[key] != val:
|
||||
return
|
||||
|
||||
payload = {
|
||||
**trigger_data,
|
||||
CONF_PLATFORM: platform_type,
|
||||
ATTR_EVENT_SOURCE: event_source,
|
||||
ATTR_EVENT: event_name,
|
||||
ATTR_EVENT_DATA: event_data,
|
||||
}
|
||||
|
||||
primary_desc = f"Z-Wave JS '{event_source}' event '{event_name}' was emitted"
|
||||
|
||||
if device:
|
||||
device_name = device.name_by_user or device.name
|
||||
payload[ATTR_DEVICE_ID] = device.id
|
||||
home_and_node_id = get_home_and_node_id_from_device_entry(device)
|
||||
assert home_and_node_id
|
||||
payload[ATTR_NODE_ID] = home_and_node_id[1]
|
||||
payload["description"] = f"{primary_desc} on {device_name}"
|
||||
else:
|
||||
payload["description"] = primary_desc
|
||||
|
||||
payload["description"] = (
|
||||
f"{payload['description']} with event data: {event_data}"
|
||||
)
|
||||
|
||||
hass.async_run_hass_job(job, {"trigger": payload})
|
||||
|
||||
@callback
|
||||
def async_remove() -> None:
|
||||
"""Remove state listeners async."""
|
||||
for unsub in unsubs:
|
||||
unsub()
|
||||
unsubs.clear()
|
||||
|
||||
@callback
|
||||
def _create_zwave_listeners() -> None:
|
||||
"""Create Z-Wave JS listeners."""
|
||||
async_remove()
|
||||
# Nodes list can come from different drivers and we will need to listen to
|
||||
# server connections for all of them.
|
||||
drivers: set[Driver] = set()
|
||||
if not (nodes := async_get_nodes_from_targets(hass, config, dev_reg=dev_reg)):
|
||||
entry_id = config[ATTR_CONFIG_ENTRY_ID]
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
assert entry
|
||||
client = entry.runtime_data.client
|
||||
driver = client.driver
|
||||
assert driver
|
||||
drivers.add(driver)
|
||||
if event_source == "controller":
|
||||
unsubs.append(driver.controller.on(event_name, async_on_event))
|
||||
else:
|
||||
unsubs.append(driver.on(event_name, async_on_event))
|
||||
|
||||
for node in nodes:
|
||||
driver = node.client.driver
|
||||
assert driver is not None # The node comes from the driver.
|
||||
drivers.add(driver)
|
||||
device_identifier = get_device_id(driver, node)
|
||||
device = dev_reg.async_get_device(identifiers={device_identifier})
|
||||
assert device
|
||||
# We need to store the device for the callback
|
||||
unsubs.append(
|
||||
                node.on(event_name, functools.partial(async_on_event, device=device))
            )
        unsubs.extend(
            async_dispatcher_connect(
                hass,
                f"{DOMAIN}_{driver.controller.home_id}_connected_to_server",
                _create_zwave_listeners,
            )
            for driver in drivers
        )

    _create_zwave_listeners()

    return async_remove


class EventTrigger(Trigger):
    """Z-Wave JS event trigger."""

    _event_source: str
    _event_name: str
    _event_data_filter: dict
    _job: HassJob
    _trigger_data: TriggerData
    _unsubs: list[Callable]

    _platform_type = PLATFORM_TYPE

    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize trigger."""
        self._config = config
@@ -279,6 +171,126 @@ class EventTrigger(Trigger):
        trigger_info: TriggerInfo,
    ) -> CALLBACK_TYPE:
        """Attach a trigger."""
        return await async_attach_trigger(
            self._hass, self._config, action, trigger_info
        dev_reg = dr.async_get(self._hass)
        config = self._config
        if config[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
            self._hass, config, dev_reg=dev_reg
        ):
            raise ValueError(
                f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
            )

        self._event_source = config[ATTR_EVENT_SOURCE]
        self._event_name = config[ATTR_EVENT]
        self._event_data_filter = config.get(ATTR_EVENT_DATA, {})
        self._job = HassJob(action)
        self._trigger_data = trigger_info["trigger_data"]
        self._unsubs: list[Callable] = []

        self._create_zwave_listeners()
        return self._async_remove

    @callback
    def _async_on_event(
        self, event_data: dict, device: dr.DeviceEntry | None = None
    ) -> None:
        """Handle event."""
        for key, val in self._event_data_filter.items():
            if key not in event_data:
                return
            if (
                self._config[ATTR_PARTIAL_DICT_MATCH]
                and isinstance(event_data[key], dict)
                and isinstance(val, dict)
            ):
                for key2, val2 in val.items():
                    if key2 not in event_data[key] or event_data[key][key2] != val2:
                        return
                continue
            if event_data[key] != val:
                return

        payload = {
            **self._trigger_data,
            CONF_PLATFORM: self._platform_type,
            ATTR_EVENT_SOURCE: self._event_source,
            ATTR_EVENT: self._event_name,
            ATTR_EVENT_DATA: event_data,
        }

        primary_desc = (
            f"Z-Wave JS '{self._event_source}' event '{self._event_name}' was emitted"
        )

        if device:
            device_name = device.name_by_user or device.name
            payload[ATTR_DEVICE_ID] = device.id
            home_and_node_id = get_home_and_node_id_from_device_entry(device)
            assert home_and_node_id
            payload[ATTR_NODE_ID] = home_and_node_id[1]
            payload["description"] = f"{primary_desc} on {device_name}"
        else:
            payload["description"] = primary_desc

        payload["description"] = (
            f"{payload['description']} with event data: {event_data}"
        )

        self._hass.async_run_hass_job(self._job, {"trigger": payload})

    @callback
    def _async_remove(self) -> None:
        """Remove state listeners async."""
        for unsub in self._unsubs:
            unsub()
        self._unsubs.clear()

    @callback
    def _create_zwave_listeners(self) -> None:
        """Create Z-Wave JS listeners."""
        self._async_remove()
        # Nodes list can come from different drivers and we will need to listen to
        # server connections for all of them.
        drivers: set[Driver] = set()
        dev_reg = dr.async_get(self._hass)
        if not (
            nodes := async_get_nodes_from_targets(
                self._hass, self._config, dev_reg=dev_reg
            )
        ):
            entry_id = self._config[ATTR_CONFIG_ENTRY_ID]
            entry = self._hass.config_entries.async_get_entry(entry_id)
            assert entry
            client = entry.runtime_data.client
            driver = client.driver
            assert driver
            drivers.add(driver)
            if self._event_source == "controller":
                self._unsubs.append(
                    driver.controller.on(self._event_name, self._async_on_event)
                )
            else:
                self._unsubs.append(driver.on(self._event_name, self._async_on_event))

        for node in nodes:
            driver = node.client.driver
            assert driver is not None  # The node comes from the driver.
            drivers.add(driver)
            device_identifier = get_device_id(driver, node)
            device = dev_reg.async_get_device(identifiers={device_identifier})
            assert device
            # We need to store the device for the callback
            self._unsubs.append(
                node.on(
                    self._event_name,
                    functools.partial(self._async_on_event, device=device),
                )
            )
        self._unsubs.extend(
            async_dispatcher_connect(
                self._hass,
                f"{DOMAIN}_{driver.controller.home_id}_connected_to_server",
                self._create_zwave_listeners,
            )
            for driver in drivers
        )
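To see the filtering rule of _async_on_event in isolation: every key in the configured event-data filter must appear in the incoming event, and with partial dict matching enabled a dict-valued filter entry only needs to be a subset of the event's dict. A minimal standalone sketch; the function and sample payloads here are illustrative, not from the integration:

from typing import Any


def event_matches(
    event_data: dict[str, Any],
    data_filter: dict[str, Any],
    partial_dict_match: bool,
) -> bool:
    """Return True if the event passes the filter (mirrors _async_on_event)."""
    for key, val in data_filter.items():
        if key not in event_data:
            return False
        if (
            partial_dict_match
            and isinstance(event_data[key], dict)
            and isinstance(val, dict)
        ):
            # Subset match: only the filtered sub-keys must agree.
            if any(event_data[key].get(k2, object()) != v2 for k2, v2 in val.items()):
                return False
            continue
        if event_data[key] != val:
            return False
    return True


assert event_matches({"args": {"status": 5, "extra": 1}}, {"args": {"status": 5}}, True)
assert not event_matches({"args": {"status": 5, "extra": 1}}, {"args": {"status": 5}}, False)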
@@ -898,6 +898,7 @@ class UnitOfSpeed(StrEnum):

    BEAUFORT = "Beaufort"
    FEET_PER_SECOND = "ft/s"
    INCHES_PER_SECOND = "in/s"
    METERS_PER_MINUTE = "m/min"
    METERS_PER_SECOND = "m/s"
    KILOMETERS_PER_HOUR = "km/h"
    KNOTS = "kn"
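The new METERS_PER_MINUTE member can be referenced like any other speed unit. A minimal sketch; the entity attribute shown is hypothetical, only the enum member itself comes from the hunk above:

from homeassistant.const import UnitOfSpeed

# UnitOfSpeed is a StrEnum, so the member compares equal to its string value.
assert UnitOfSpeed.METERS_PER_MINUTE == "m/min"

# Hypothetical use on an entity definition:
native_unit_of_measurement = UnitOfSpeed.METERS_PER_MINUTE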
1
homeassistant/generated/config_flows.py
generated
@@ -559,6 +559,7 @@ FLOWS = {
    "sabnzbd",
    "samsungtv",
    "sanix",
    "satel_integra",
    "schlage",
    "scrape",
    "screenlogic",
@@ -5131,7 +5131,7 @@
    },
    "prowl": {
      "name": "Prowl",
      "integration_type": "hub",
      "integration_type": "service",
      "config_flow": false,
      "iot_class": "cloud_push"
    },
@@ -5728,8 +5728,9 @@
    "satel_integra": {
      "name": "Satel Integra",
      "integration_type": "hub",
      "config_flow": false,
      "iot_class": "local_push"
      "config_flow": true,
      "iot_class": "local_push",
      "single_config_entry": true
    },
    "schlage": {
      "name": "Schlage",
@@ -1108,11 +1108,21 @@ def key_value_schemas(
    value_schemas: ValueSchemas,
    default_schema: VolSchemaType | Callable[[Any], dict[str, Any]] | None = None,
    default_description: str | None = None,
    list_alternatives: bool = True,
) -> Callable[[Any], dict[Hashable, Any]]:
    """Create a validator that validates based on a value for specific key.

    This gives better error messages.

    default_schema: An optional schema to use if the key value is not in value_schemas.
    default_description: A description of what is expected by the default schema, this
        will be added to the error message.
    list_alternatives: If True, list the keys in `value_schemas` in the error message.
    """
    if not list_alternatives and not default_description:
        raise ValueError(
            "default_description must be provided if list_alternatives is False"
        )

    def key_value_validator(value: Any) -> dict[Hashable, Any]:
        if not isinstance(value, dict):
@@ -1127,9 +1137,13 @@ def key_value_schemas(
        with contextlib.suppress(vol.Invalid):
            return cast(dict[Hashable, Any], default_schema(value))

        alternatives = ", ".join(str(alternative) for alternative in value_schemas)
        if default_description:
            alternatives = f"{alternatives}, {default_description}"
        if list_alternatives:
            alternatives = ", ".join(str(alternative) for alternative in value_schemas)
            if default_description:
                alternatives = f"{alternatives}, {default_description}"
        else:
            # mypy does not understand that default_description is not None here
            alternatives = default_description  # type: ignore[assignment]
        raise vol.Invalid(
            f"Unexpected value for {key}: '{key_value}'. Expected {alternatives}"
        )
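The effect of the new list_alternatives flag: instead of dumping every key in value_schemas into the error message, the validator can fall back to default_description alone. A minimal sketch, assuming cv is homeassistant.helpers.config_validation as in the hunk above; the schemas and values are illustrative:

import voluptuous as vol

from homeassistant.helpers import config_validation as cv

validator = cv.key_value_schemas(
    "condition",
    {"state": vol.Schema({"condition": "state", "entity_id": str})},
    default_description="a condition or a valid template",
    list_alternatives=False,
)

# Raises vol.Invalid with:
#   Unexpected value for condition: 'frobnicate'.
#   Expected a condition or a valid template
validator({"condition": "frobnicate"})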
@@ -1502,16 +1516,21 @@ NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any(
    vol.All(str, entity_domain(["input_number", "number", "sensor", "zone"])),
)

CONDITION_BASE_SCHEMA: VolDictType = {

_CONDITION_COMMON_SCHEMA: VolDictType = {
    vol.Optional(CONF_ALIAS): string,
    vol.Optional(CONF_ENABLED): vol.Any(boolean, template),
}

CONDITION_BASE_SCHEMA: VolDictType = {
    **_CONDITION_COMMON_SCHEMA,
    vol.Required(CONF_CONDITION): str,
}

NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
    vol.Schema(
        {
            **CONDITION_BASE_SCHEMA,
            vol.Required(CONF_CONDITION): "numeric_state",
            vol.Required(CONF_ENTITY_ID): entity_ids_or_uuids,
            vol.Optional(CONF_ATTRIBUTE): str,
            CONF_BELOW: NUMERIC_STATE_THRESHOLD_SCHEMA,
@@ -1524,7 +1543,6 @@ NUMERIC_STATE_CONDITION_SCHEMA = vol.All(

STATE_CONDITION_BASE_SCHEMA = {
    **CONDITION_BASE_SCHEMA,
    vol.Required(CONF_CONDITION): "state",
    vol.Required(CONF_ENTITY_ID): entity_ids_or_uuids,
    vol.Optional(CONF_MATCH, default=ENTITY_MATCH_ALL): vol.All(
        vol.Lower, vol.Any(ENTITY_MATCH_ALL, ENTITY_MATCH_ANY)
@@ -1567,7 +1585,6 @@ def STATE_CONDITION_SCHEMA(value: Any) -> dict[str, Any]:
TEMPLATE_CONDITION_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "template",
        vol.Required(CONF_VALUE_TEMPLATE): template,
    }
)
@@ -1576,7 +1593,6 @@ TIME_CONDITION_SCHEMA = vol.All(
    vol.Schema(
        {
            **CONDITION_BASE_SCHEMA,
            vol.Required(CONF_CONDITION): "time",
            vol.Optional("before"): vol.Any(
                time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"]))
            ),
@@ -1592,7 +1608,6 @@ TIME_CONDITION_SCHEMA = vol.All(
TRIGGER_CONDITION_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "trigger",
        vol.Required(CONF_ID): vol.All(ensure_list, [string]),
    }
)
@@ -1600,7 +1615,6 @@ TRIGGER_CONDITION_SCHEMA = vol.Schema(
AND_CONDITION_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "and",
        vol.Required(CONF_CONDITIONS): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1611,7 +1625,7 @@ AND_CONDITION_SCHEMA = vol.Schema(

AND_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        **_CONDITION_COMMON_SCHEMA,
        vol.Required("and"): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1623,7 +1637,6 @@ AND_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
OR_CONDITION_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "or",
        vol.Required(CONF_CONDITIONS): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1634,7 +1647,7 @@ OR_CONDITION_SCHEMA = vol.Schema(

OR_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        **_CONDITION_COMMON_SCHEMA,
        vol.Required("or"): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1646,7 +1659,6 @@ OR_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
NOT_CONDITION_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "not",
        vol.Required(CONF_CONDITIONS): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1657,7 +1669,7 @@ NOT_CONDITION_SCHEMA = vol.Schema(

NOT_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        **_CONDITION_COMMON_SCHEMA,
        vol.Required("not"): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1669,7 +1681,6 @@ NOT_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
DEVICE_CONDITION_BASE_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "device",
        vol.Required(CONF_DEVICE_ID): str,
        vol.Required(CONF_DOMAIN): str,
        vol.Remove("metadata"): dict,
@@ -1725,7 +1736,7 @@ dynamic_template_condition = vol.All(

CONDITION_SHORTHAND_SCHEMA = vol.Schema(
    {
        **CONDITION_BASE_SCHEMA,
        **_CONDITION_COMMON_SCHEMA,
        vol.Required(CONF_CONDITION): vol.All(
            ensure_list,
            # pylint: disable-next=unnecessary-lambda
@@ -1752,8 +1763,8 @@ BUILT_IN_CONDITIONS: ValueSchemas = {
def _base_condition_validator(value: Any) -> Any:
    vol.Schema(
        {
            **CONDITION_BASE_SCHEMA,
            CONF_CONDITION: vol.NotIn(BUILT_IN_CONDITIONS),
            **_CONDITION_COMMON_SCHEMA,
            CONF_CONDITION: vol.All(str, vol.NotIn(BUILT_IN_CONDITIONS)),
        },
        extra=vol.ALLOW_EXTRA,
    )(value)
@@ -1768,6 +1779,8 @@ CONDITION_SCHEMA: vol.Schema = vol.Schema(
            CONF_CONDITION,
            BUILT_IN_CONDITIONS,
            _base_condition_validator,
            "a condition, a list of conditions or a valid template",
            list_alternatives=False,
        ),
    ),
    dynamic_template_condition,
@@ -1779,7 +1792,7 @@ CONDITIONS_SCHEMA = vol.All(ensure_list, [CONDITION_SCHEMA])
dynamic_template_condition_action = vol.All(
    # Wrap a shorthand template condition action in a template condition
    vol.Schema(
        {**CONDITION_BASE_SCHEMA, vol.Required(CONF_CONDITION): dynamic_template}
        {**_CONDITION_COMMON_SCHEMA, vol.Required(CONF_CONDITION): dynamic_template}
    ),
    lambda config: {
        **config,
@@ -1799,7 +1812,8 @@ CONDITION_ACTION_SCHEMA: vol.Schema = vol.Schema(
            dynamic_template_condition_action,
            _base_condition_validator,
        ),
        "a list of conditions or a valid template",
        "a condition, a list of conditions or a valid template",
        list_alternatives=False,
    ),
)
)
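With the split above, the shorthand schemas accept only the common keys (alias, enabled) plus their own shorthand key, while CONDITION_BASE_SCHEMA now requires a condition key. A sketch of a config the or-shorthand schema validates, assuming the schema names come from homeassistant.helpers.config_validation as in the hunks above; entity ids are illustrative:

OR_CONDITION_SHORTHAND_SCHEMA(
    {
        "alias": "Either door is open",
        "or": [
            {"condition": "state", "entity_id": "binary_sensor.front_door", "state": "on"},
            {"condition": "state", "entity_id": "binary_sensor.back_door", "state": "on"},
        ],
    }
)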
@@ -31,7 +31,6 @@ from typing import (
    cast,
    overload,
)
from urllib.parse import urlencode as urllib_urlencode
import weakref

from awesomeversion import AwesomeVersion
@@ -82,12 +81,7 @@ from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.translation import async_translate_state
from homeassistant.helpers.typing import TemplateVarsType
from homeassistant.loader import bind_hass
from homeassistant.util import (
    convert,
    dt as dt_util,
    location as location_util,
    slugify as slugify_util,
)
from homeassistant.util import convert, dt as dt_util, location as location_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
@@ -2286,46 +2280,6 @@ def _is_string_like(value: Any) -> bool:
    return isinstance(value, (str, bytes, bytearray))


def regex_match(value, find="", ignorecase=False):
    """Match value using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.IGNORECASE if ignorecase else 0
    return bool(_regex_cache(find, flags).match(value))


_regex_cache = lru_cache(maxsize=128)(re.compile)


def regex_replace(value="", find="", replace="", ignorecase=False):
    """Replace using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.IGNORECASE if ignorecase else 0
    return _regex_cache(find, flags).sub(replace, value)


def regex_search(value, find="", ignorecase=False):
    """Search using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.IGNORECASE if ignorecase else 0
    return bool(_regex_cache(find, flags).search(value))


def regex_findall_index(value, find="", index=0, ignorecase=False):
    """Find all matches using regex and then pick specific match index."""
    return regex_findall(value, find, ignorecase)[index]


def regex_findall(value, find="", ignorecase=False):
    """Find all matches using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.IGNORECASE if ignorecase else 0
    return _regex_cache(find, flags).findall(value)


def struct_pack(value: Any | None, format_string: str) -> bytes | None:
    """Pack an object into a bytes object."""
    try:
@@ -2367,16 +2321,6 @@ def from_hex(value: str) -> bytes:
    return bytes.fromhex(value)


def ordinal(value):
    """Perform ordinal conversion."""
    suffixes = ["th", "st", "nd", "rd"] + ["th"] * 6  # codespell:ignore nd
    return str(value) + (
        suffixes[(int(str(value)[-1])) % 10]
        if int(str(value)[-2:]) % 100 not in range(11, 14)
        else "th"
    )


def from_json(value, default=_SENTINEL):
    """Convert a JSON string to an object."""
    try:
@@ -2523,16 +2467,6 @@ def time_until(hass: HomeAssistant, value: Any | datetime, precision: int = 1) -
    return dt_util.get_time_remaining(value, precision)


def urlencode(value):
    """Urlencode dictionary and return as UTF-8 string."""
    return urllib_urlencode(value).encode("utf-8")


def slugify(value, separator="_"):
    """Convert a string into a slug, such as what is used for entity ids."""
    return slugify_util(value, separator=separator)


def iif(
    value: Any, if_true: Any = True, if_false: Any = False, if_none: Any = _SENTINEL
) -> Any:
@@ -2828,6 +2762,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.add_extension("homeassistant.helpers.template.extensions.Base64Extension")
        self.add_extension("homeassistant.helpers.template.extensions.CryptoExtension")
        self.add_extension("homeassistant.helpers.template.extensions.MathExtension")
        self.add_extension("homeassistant.helpers.template.extensions.RegexExtension")
        self.add_extension("homeassistant.helpers.template.extensions.StringExtension")

        self.globals["as_datetime"] = as_datetime
        self.globals["as_function"] = as_function
@@ -2847,7 +2783,6 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.globals["pack"] = struct_pack
        self.globals["set"] = _to_set
        self.globals["shuffle"] = shuffle
        self.globals["slugify"] = slugify
        self.globals["strptime"] = strptime
        self.globals["symmetric_difference"] = symmetric_difference
        self.globals["timedelta"] = timedelta
@@ -2855,7 +2790,6 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.globals["typeof"] = typeof
        self.globals["union"] = union
        self.globals["unpack"] = struct_unpack
        self.globals["urlencode"] = urlencode
        self.globals["version"] = version
        self.globals["zip"] = zip

@@ -2881,17 +2815,10 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.filters["is_number"] = is_number
        self.filters["multiply"] = multiply
        self.filters["ord"] = ord
        self.filters["ordinal"] = ordinal
        self.filters["pack"] = struct_pack
        self.filters["random"] = random_every_time
        self.filters["regex_findall_index"] = regex_findall_index
        self.filters["regex_findall"] = regex_findall
        self.filters["regex_match"] = regex_match
        self.filters["regex_replace"] = regex_replace
        self.filters["regex_search"] = regex_search
        self.filters["round"] = forgiving_round
        self.filters["shuffle"] = shuffle
        self.filters["slugify"] = slugify
        self.filters["symmetric_difference"] = symmetric_difference
        self.filters["timestamp_custom"] = timestamp_custom
        self.filters["timestamp_local"] = timestamp_local
@@ -2907,8 +2834,6 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.tests["datetime"] = _is_datetime
        self.tests["is_number"] = is_number
        self.tests["list"] = _is_list
        self.tests["match"] = regex_match
        self.tests["search"] = regex_search
        self.tests["set"] = _is_set
        self.tests["string_like"] = _is_string_like
        self.tests["tuple"] = _is_tuple
homeassistant/helpers/template/extensions/__init__.py
@@ -3,5 +3,13 @@
from .base64 import Base64Extension
from .crypto import CryptoExtension
from .math import MathExtension
from .regex import RegexExtension
from .string import StringExtension

__all__ = ["Base64Extension", "CryptoExtension", "MathExtension"]
__all__ = [
    "Base64Extension",
    "CryptoExtension",
    "MathExtension",
    "RegexExtension",
    "StringExtension",
]
109
homeassistant/helpers/template/extensions/regex.py
Normal file
@@ -0,0 +1,109 @@
"""Jinja2 extension for regular expression functions."""

from __future__ import annotations

from functools import lru_cache
import re
from typing import TYPE_CHECKING, Any

from .base import BaseTemplateExtension, TemplateFunction

if TYPE_CHECKING:
    from homeassistant.helpers.template import TemplateEnvironment

# Module-level regex cache shared across all instances
_regex_cache = lru_cache(maxsize=128)(re.compile)


class RegexExtension(BaseTemplateExtension):
    """Jinja2 extension for regular expression functions."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Initialize the regex extension."""

        super().__init__(
            environment,
            functions=[
                TemplateFunction(
                    "regex_match",
                    self.regex_match,
                    as_filter=True,
                ),
                TemplateFunction(
                    "regex_search",
                    self.regex_search,
                    as_filter=True,
                ),
                # Register tests with different names
                TemplateFunction(
                    "match",
                    self.regex_match,
                    as_test=True,
                ),
                TemplateFunction(
                    "search",
                    self.regex_search,
                    as_test=True,
                ),
                TemplateFunction(
                    "regex_replace",
                    self.regex_replace,
                    as_filter=True,
                ),
                TemplateFunction(
                    "regex_findall",
                    self.regex_findall,
                    as_filter=True,
                ),
                TemplateFunction(
                    "regex_findall_index",
                    self.regex_findall_index,
                    as_filter=True,
                ),
            ],
        )

    def regex_match(self, value: Any, find: str = "", ignorecase: bool = False) -> bool:
        """Match value using regex."""
        if not isinstance(value, str):
            value = str(value)
        flags = re.IGNORECASE if ignorecase else 0
        return bool(_regex_cache(find, flags).match(value))

    def regex_replace(
        self,
        value: Any = "",
        find: str = "",
        replace: str = "",
        ignorecase: bool = False,
    ) -> str:
        """Replace using regex."""
        if not isinstance(value, str):
            value = str(value)
        flags = re.IGNORECASE if ignorecase else 0
        result = _regex_cache(find, flags).sub(replace, value)
        return str(result)

    def regex_search(
        self, value: Any, find: str = "", ignorecase: bool = False
    ) -> bool:
        """Search using regex."""
        if not isinstance(value, str):
            value = str(value)
        flags = re.IGNORECASE if ignorecase else 0
        return bool(_regex_cache(find, flags).search(value))

    def regex_findall_index(
        self, value: Any, find: str = "", index: int = 0, ignorecase: bool = False
    ) -> str:
        """Find all matches using regex and then pick specific match index."""
        return self.regex_findall(value, find, ignorecase)[index]

    def regex_findall(
        self, value: Any, find: str = "", ignorecase: bool = False
    ) -> list[str]:
        """Find all matches using regex."""
        if not isinstance(value, str):
            value = str(value)
        flags = re.IGNORECASE if ignorecase else 0
        return _regex_cache(find, flags).findall(value)
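Template behavior is unchanged from the removed module-level helpers; the same names are simply registered by the extension now. A quick usage sketch, assuming a configured hass instance and that renders run inside the event loop:

from homeassistant.helpers.template import Template

# regex_replace filter, registered above with as_filter=True
tpl = Template("{{ 'Living Room Lamp' | regex_replace('\\s+', '_') }}", hass)
assert tpl.async_render() == "Living_Room_Lamp"

# `match` test, registered above with as_test=True
tpl = Template("{{ 'zwa-1234' is match('zwa-\\d+') }}", hass)
assert tpl.async_render() is True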
58
homeassistant/helpers/template/extensions/string.py
Normal file
@@ -0,0 +1,58 @@
"""Jinja2 extension for string processing functions."""

from __future__ import annotations

from typing import TYPE_CHECKING, Any
from urllib.parse import urlencode as urllib_urlencode

from homeassistant.util import slugify as slugify_util

from .base import BaseTemplateExtension, TemplateFunction

if TYPE_CHECKING:
    from homeassistant.helpers.template import TemplateEnvironment


class StringExtension(BaseTemplateExtension):
    """Jinja2 extension for string processing functions."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Initialize the string extension."""
        super().__init__(
            environment,
            functions=[
                TemplateFunction(
                    "ordinal",
                    self.ordinal,
                    as_filter=True,
                ),
                TemplateFunction(
                    "slugify",
                    self.slugify,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "urlencode",
                    self.urlencode,
                    as_global=True,
                ),
            ],
        )

    def ordinal(self, value: Any) -> str:
        """Perform ordinal conversion."""
        suffixes = ["th", "st", "nd", "rd"] + ["th"] * 6  # codespell:ignore nd
        return str(value) + (
            suffixes[(int(str(value)[-1])) % 10]
            if int(str(value)[-2:]) % 100 not in range(11, 14)
            else "th"
        )

    def slugify(self, value: Any, separator: str = "_") -> str:
        """Convert a string into a slug, such as what is used for entity ids."""
        return slugify_util(str(value), separator=separator)

    def urlencode(self, value: Any) -> bytes:
        """Urlencode dictionary and return as UTF-8 string."""
        return urllib_urlencode(value).encode("utf-8")
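The ordinal rule above (11 through 13 always take "th"; otherwise the final digit selects the suffix) can be checked standalone; this is a pure-Python restatement for illustration, not the integration's code:

# Minimal standalone check of the ordinal rule, no Home Assistant imports.
suffixes = ["th", "st", "nd", "rd"] + ["th"] * 6  # codespell:ignore nd


def ordinal(value: int) -> str:
    """11-13 always take 'th'; otherwise the last digit picks the suffix."""
    if int(str(value)[-2:]) % 100 in range(11, 14):
        return f"{value}th"
    return f"{value}{suffixes[int(str(value)[-1]) % 10]}"


assert [ordinal(n) for n in (1, 2, 3, 4, 11, 12, 21, 102)] == [
    "1st", "2nd", "3rd", "4th", "11th", "12th", "21st", "102nd",
]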
Some files were not shown because too many files have changed in this diff.