Mirror of https://github.com/home-assistant/core.git
Commit a00af59d15: Merge branch 'dev' into mqtt-subentry-export
CODEOWNERS (generated)
@@ -1758,8 +1758,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/wirelesstag/ @sergeymaysak
 /homeassistant/components/withings/ @joostlek
 /tests/components/withings/ @joostlek
-/homeassistant/components/wiz/ @sbidy
+/homeassistant/components/wiz/ @sbidy @arturpragacz
-/tests/components/wiz/ @sbidy
+/tests/components/wiz/ @sbidy @arturpragacz
 /homeassistant/components/wled/ @frenck
 /tests/components/wled/ @frenck
 /homeassistant/components/wmspro/ @mback2k
@@ -13,7 +13,7 @@ from homeassistant.components.conversation import (
 )
 from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
 from homeassistant.helpers import llm
-from homeassistant.helpers.chat_session import async_get_chat_session
+from homeassistant.helpers.chat_session import ChatSession
 from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.util import dt as dt_util

@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
     @contextlib.asynccontextmanager
     async def _async_get_ai_task_chat_log(
         self,
+        session: ChatSession,
         task: GenDataTask,
     ) -> AsyncGenerator[ChatLog]:
         """Context manager used to manage the ChatLog used during an AI Task."""
         # pylint: disable-next=contextmanager-generator-missing-cleanup
         with (
-            async_get_chat_session(self.hass) as session,
             async_get_chat_log(
                 self.hass,
                 session,
@@ -88,12 +88,13 @@ class AITaskEntity(RestoreEntity):
     @final
     async def internal_async_generate_data(
         self,
+        session: ChatSession,
         task: GenDataTask,
     ) -> GenDataTaskResult:
         """Run a gen data task."""
         self.__last_activity = dt_util.utcnow().isoformat()
         self.async_write_ha_state()
-        async with self._async_get_ai_task_chat_log(task) as chat_log:
+        async with self._async_get_ai_task_chat_log(session, task) as chat_log:
             return await self._async_generate_data(task, chat_log)

     async def _async_generate_data(
@@ -1,6 +1,7 @@
 {
   "domain": "ai_task",
   "name": "AI Task",
+  "after_dependencies": ["camera"],
   "codeowners": ["@home-assistant/core"],
   "dependencies": ["conversation", "media_source"],
   "documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -15,6 +15,7 @@ generate_data:
       required: false
       selector:
         entity:
+          filter:
             domain: ai_task
             supported_features:
               - ai_task.AITaskEntityFeature.GENERATE_DATA
@@ -3,17 +3,32 @@
 from __future__ import annotations

 from dataclasses import dataclass
+import mimetypes
+from pathlib import Path
+import tempfile
 from typing import Any

 import voluptuous as vol

-from homeassistant.components import conversation, media_source
-from homeassistant.core import HomeAssistant
+from homeassistant.components import camera, conversation, media_source
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.chat_session import async_get_chat_session

 from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


+def _save_camera_snapshot(image: camera.Image) -> Path:
+    """Save camera snapshot to temp file."""
+    with tempfile.NamedTemporaryFile(
+        mode="wb",
+        suffix=mimetypes.guess_extension(image.content_type, False),
+        delete=False,
+    ) as temp_file:
+        temp_file.write(image.content)
+        return Path(temp_file.name)
+
+
 async def async_generate_data(
     hass: HomeAssistant,
     *,
@@ -40,40 +55,78 @@ async def async_generate_data(
     )

     # Resolve attachments
-    resolved_attachments: list[conversation.Attachment] | None = None
+    resolved_attachments: list[conversation.Attachment] = []
+    created_files: list[Path] = []

-    if attachments:
-        if AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features:
-            raise HomeAssistantError(
-                f"AI Task entity {entity_id} does not support attachments"
-            )
+    if (
+        attachments
+        and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
+    ):
+        raise HomeAssistantError(
+            f"AI Task entity {entity_id} does not support attachments"
+        )

-        resolved_attachments = []
+    for attachment in attachments or []:
+        media_content_id = attachment["media_content_id"]

-        for attachment in attachments:
-            media = await media_source.async_resolve_media(
-                hass, attachment["media_content_id"], None
-            )
-            if media.path is None:
-                raise HomeAssistantError(
-                    "Only local attachments are currently supported"
-                )
-            resolved_attachments.append(
-                conversation.Attachment(
-                    media_content_id=attachment["media_content_id"],
-                    url=media.url,
-                    mime_type=media.mime_type,
-                    path=media.path,
-                )
-            )
+        # Special case for camera media sources
+        if media_content_id.startswith("media-source://camera/"):
+            # Extract entity_id from the media content ID
+            entity_id = media_content_id.removeprefix("media-source://camera/")
+
+            # Get snapshot from camera
+            image = await camera.async_get_image(hass, entity_id)
+
+            temp_filename = await hass.async_add_executor_job(
+                _save_camera_snapshot, image
+            )
+            created_files.append(temp_filename)
+
+            resolved_attachments.append(
+                conversation.Attachment(
+                    media_content_id=media_content_id,
+                    mime_type=image.content_type,
+                    path=temp_filename,
+                )
+            )
+        else:
+            # Handle regular media sources
+            media = await media_source.async_resolve_media(hass, media_content_id, None)
+            if media.path is None:
+                raise HomeAssistantError(
+                    "Only local attachments are currently supported"
+                )
+            resolved_attachments.append(
+                conversation.Attachment(
+                    media_content_id=media_content_id,
+                    mime_type=media.mime_type,
+                    path=media.path,
+                )
+            )

-    return await entity.internal_async_generate_data(
-        GenDataTask(
-            name=task_name,
-            instructions=instructions,
-            structure=structure,
-            attachments=resolved_attachments,
-        )
-    )
+    with async_get_chat_session(hass) as session:
+        if created_files:
+
+            def cleanup_files() -> None:
+                """Cleanup temporary files."""
+                for file in created_files:
+                    file.unlink(missing_ok=True)
+
+            @callback
+            def cleanup_files_callback() -> None:
+                """Cleanup temporary files."""
+                hass.async_add_executor_job(cleanup_files)
+
+            session.async_on_cleanup(cleanup_files_callback)
+
+        return await entity.internal_async_generate_data(
+            session,
+            GenDataTask(
+                name=task_name,
+                instructions=instructions,
+                structure=structure,
+                attachments=resolved_attachments or None,
+            ),
+        )
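Note (added for illustration; not part of the commit above): the camera branch writes each snapshot to a temporary file that must outlive the current call, which is why _save_camera_snapshot passes delete=False and the file is only removed by the chat session's cleanup callback. A minimal standalone sketch of that pattern using only the standard library follows; the "image/jpeg" content type and the placeholder bytes are assumptions standing in for the camera.Image returned by camera.async_get_image.

import mimetypes
import tempfile
from pathlib import Path

# Map the camera's content type to a file suffix, e.g. "image/jpeg" -> ".jpg".
suffix = mimetypes.guess_extension("image/jpeg", False)

# delete=False keeps the snapshot on disk after the with-block exits; the integration
# deletes it later in cleanup_files(), once the chat session is closed.
with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as temp_file:
    temp_file.write(b"\xff\xd8\xff")  # placeholder bytes standing in for image.content
    snapshot_path = Path(temp_file.name)

snapshot_path.unlink(missing_ok=True)  # mirrors file.unlink(missing_ok=True) in cleanup_files()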
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_push",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.6.12"]
+  "requirements": ["aioairzone-cloud==0.6.13"]
 }
@@ -2,11 +2,22 @@

 import amberelectric

+from homeassistant.components.sensor import ConfigType
 from homeassistant.const import CONF_API_TOKEN
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import config_validation as cv

-from .const import CONF_SITE_ID, PLATFORMS
+from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
 from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
+from .services import setup_services

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
+
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the Amber component."""
+    setup_services(hass)
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
@@ -1,14 +1,24 @@
 """Amber Electric Constants."""

 import logging
+from typing import Final

 from homeassistant.const import Platform

-DOMAIN = "amberelectric"
+DOMAIN: Final = "amberelectric"
 CONF_SITE_NAME = "site_name"
 CONF_SITE_ID = "site_id"
+
+ATTR_CONFIG_ENTRY_ID = "config_entry_id"
+ATTR_CHANNEL_TYPE = "channel_type"

 ATTRIBUTION = "Data provided by Amber Electric"

 LOGGER = logging.getLogger(__package__)
 PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
+
+SERVICE_GET_FORECASTS = "get_forecasts"
+
+GENERAL_CHANNEL = "general"
+CONTROLLED_LOAD_CHANNEL = "controlled_load"
+FEED_IN_CHANNEL = "feed_in"
@@ -10,7 +10,6 @@ from amberelectric.models.actual_interval import ActualInterval
 from amberelectric.models.channel import ChannelType
 from amberelectric.models.current_interval import CurrentInterval
 from amberelectric.models.forecast_interval import ForecastInterval
-from amberelectric.models.price_descriptor import PriceDescriptor
 from amberelectric.rest import ApiException

 from homeassistant.config_entries import ConfigEntry
@@ -18,6 +17,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import LOGGER
+from .helpers import normalize_descriptor

 type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]

@@ -49,27 +49,6 @@ def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) ->
     return interval.channel_type == ChannelType.FEEDIN


-def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
-    """Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
-    if descriptor is None:
-        return None
-    if descriptor.value == "spike":
-        return "spike"
-    if descriptor.value == "high":
-        return "high"
-    if descriptor.value == "neutral":
-        return "neutral"
-    if descriptor.value == "low":
-        return "low"
-    if descriptor.value == "veryLow":
-        return "very_low"
-    if descriptor.value == "extremelyLow":
-        return "extremely_low"
-    if descriptor.value == "negative":
-        return "negative"
-    return None
-
-
 class AmberUpdateCoordinator(DataUpdateCoordinator):
     """AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""

@@ -103,7 +82,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
             "grid": {},
         }
         try:
-            data = self._api.get_current_prices(self.site_id, next=48)
+            data = self._api.get_current_prices(self.site_id, next=288)
             intervals = [interval.actual_instance for interval in data]
         except ApiException as api_exception:
             raise UpdateFailed("Missing price data, skipping update") from api_exception
homeassistant/components/amberelectric/helpers.py (new file)
@@ -0,0 +1,25 @@
+"""Formatting helpers used to convert things."""
+
+from amberelectric.models.price_descriptor import PriceDescriptor
+
+DESCRIPTOR_MAP: dict[str, str] = {
+    PriceDescriptor.SPIKE: "spike",
+    PriceDescriptor.HIGH: "high",
+    PriceDescriptor.NEUTRAL: "neutral",
+    PriceDescriptor.LOW: "low",
+    PriceDescriptor.VERYLOW: "very_low",
+    PriceDescriptor.EXTREMELYLOW: "extremely_low",
+    PriceDescriptor.NEGATIVE: "negative",
+}
+
+
+def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
+    """Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
+    if descriptor in DESCRIPTOR_MAP:
+        return DESCRIPTOR_MAP[descriptor]
+    return None
+
+
+def format_cents_to_dollars(cents: float) -> float:
+    """Return a formatted conversion from cents to dollars."""
+    return round(cents / 100, 2)
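Note (added for illustration; not part of the commit): expected behaviour of the two helpers in the new file, assuming the PriceDescriptor enum members referenced in DESCRIPTOR_MAP exist as shown above.

from amberelectric.models.price_descriptor import PriceDescriptor

from homeassistant.components.amberelectric.helpers import (
    format_cents_to_dollars,
    normalize_descriptor,
)

# Known descriptors map to snake_case strings; None or unknown values yield None.
assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
assert normalize_descriptor(None) is None

# Prices arrive in cents and are rounded to two decimal places in dollars.
assert format_cents_to_dollars(2516.7) == 25.17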
@@ -22,5 +22,10 @@
       }
     }
-  }
+  },
+  "services": {
+    "get_forecasts": {
+      "service": "mdi:transmission-tower"
+    }
   }
 }
|
@ -23,16 +23,12 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
|||||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
|
|
||||||
from .const import ATTRIBUTION
|
from .const import ATTRIBUTION
|
||||||
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator, normalize_descriptor
|
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
|
||||||
|
from .helpers import format_cents_to_dollars, normalize_descriptor
|
||||||
|
|
||||||
UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"
|
UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"
|
||||||
|
|
||||||
|
|
||||||
def format_cents_to_dollars(cents: float) -> float:
|
|
||||||
"""Return a formatted conversion from cents to dollars."""
|
|
||||||
return round(cents / 100, 2)
|
|
||||||
|
|
||||||
|
|
||||||
def friendly_channel_type(channel_type: str) -> str:
|
def friendly_channel_type(channel_type: str) -> str:
|
||||||
"""Return a human readable version of the channel type."""
|
"""Return a human readable version of the channel type."""
|
||||||
if channel_type == "controlled_load":
|
if channel_type == "controlled_load":
|
||||||
|
homeassistant/components/amberelectric/services.py (new file)
@@ -0,0 +1,121 @@
+"""Amber Electric Service class."""
+
+from amberelectric.models.channel import ChannelType
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.core import (
+    HomeAssistant,
+    ServiceCall,
+    ServiceResponse,
+    SupportsResponse,
+)
+from homeassistant.exceptions import ServiceValidationError
+from homeassistant.helpers.selector import ConfigEntrySelector
+from homeassistant.util.json import JsonValueType
+
+from .const import (
+    ATTR_CHANNEL_TYPE,
+    ATTR_CONFIG_ENTRY_ID,
+    CONTROLLED_LOAD_CHANNEL,
+    DOMAIN,
+    FEED_IN_CHANNEL,
+    GENERAL_CHANNEL,
+    SERVICE_GET_FORECASTS,
+)
+from .coordinator import AmberConfigEntry
+from .helpers import format_cents_to_dollars, normalize_descriptor
+
+GET_FORECASTS_SCHEMA = vol.Schema(
+    {
+        ATTR_CONFIG_ENTRY_ID: ConfigEntrySelector({"integration": DOMAIN}),
+        ATTR_CHANNEL_TYPE: vol.In(
+            [GENERAL_CHANNEL, CONTROLLED_LOAD_CHANNEL, FEED_IN_CHANNEL]
+        ),
+    }
+)
+
+
+def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
+    """Get the Amber config entry."""
+    if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
+        raise ServiceValidationError(
+            translation_domain=DOMAIN,
+            translation_key="integration_not_found",
+            translation_placeholders={"target": config_entry_id},
+        )
+    if entry.state is not ConfigEntryState.LOADED:
+        raise ServiceValidationError(
+            translation_domain=DOMAIN,
+            translation_key="not_loaded",
+            translation_placeholders={"target": entry.title},
+        )
+    return entry
+
+
+def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
+    """Return an array of forecasts."""
+    results: list[JsonValueType] = []
+
+    if channel_type not in data["forecasts"]:
+        raise ServiceValidationError(
+            translation_domain=DOMAIN,
+            translation_key="channel_not_found",
+            translation_placeholders={"channel_type": channel_type},
+        )
+
+    intervals = data["forecasts"][channel_type]
+
+    for interval in intervals:
+        datum = {}
+        datum["duration"] = interval.duration
+        datum["date"] = interval.var_date.isoformat()
+        datum["nem_date"] = interval.nem_time.isoformat()
+        datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
+        if interval.channel_type == ChannelType.FEEDIN:
+            datum["per_kwh"] = datum["per_kwh"] * -1
+        datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
+        datum["start_time"] = interval.start_time.isoformat()
+        datum["end_time"] = interval.end_time.isoformat()
+        datum["renewables"] = round(interval.renewables)
+        datum["spike_status"] = interval.spike_status.value
+        datum["descriptor"] = normalize_descriptor(interval.descriptor)
+
+        if interval.range is not None:
+            datum["range_min"] = format_cents_to_dollars(interval.range.min)
+            datum["range_max"] = format_cents_to_dollars(interval.range.max)
+
+        if interval.advanced_price is not None:
+            multiplier = -1 if interval.channel_type == ChannelType.FEEDIN else 1
+            datum["advanced_price_low"] = multiplier * format_cents_to_dollars(
+                interval.advanced_price.low
+            )
+            datum["advanced_price_predicted"] = multiplier * format_cents_to_dollars(
+                interval.advanced_price.predicted
+            )
+            datum["advanced_price_high"] = multiplier * format_cents_to_dollars(
+                interval.advanced_price.high
+            )
+
+        results.append(datum)
+
+    return results
+
+
+def setup_services(hass: HomeAssistant) -> None:
+    """Set up the services for the Amber integration."""
+
+    async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
+        channel_type = call.data[ATTR_CHANNEL_TYPE]
+        entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
+        coordinator = entry.runtime_data
+        forecasts = get_forecasts(channel_type, coordinator.data)
+        return {"forecasts": forecasts}
+
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_GET_FORECASTS,
+        handle_get_forecasts,
+        GET_FORECASTS_SCHEMA,
+        supports_response=SupportsResponse.ONLY,
+    )
homeassistant/components/amberelectric/services.yaml (new file)
@@ -0,0 +1,16 @@
+get_forecasts:
+  fields:
+    config_entry_id:
+      required: true
+      selector:
+        config_entry:
+          integration: amberelectric
+    channel_type:
+      required: true
+      selector:
+        select:
+          options:
+            - general
+            - controlled_load
+            - feed_in
+          translation_key: channel_type
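Note (added for illustration; not part of the commit): a sketch of calling the new service from inside Home Assistant, assuming an async context with a hass instance available. The config entry ID "abc123" is a made-up placeholder, and channel_type must be one of the options defined in services.yaml above.

response = await hass.services.async_call(
    "amberelectric",
    "get_forecasts",
    {"config_entry_id": "abc123", "channel_type": "general"},
    blocking=True,
    return_response=True,  # the service is registered with SupportsResponse.ONLY
)
forecasts = response["forecasts"]  # list of dicts built by get_forecasts() in services.py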
@@ -1,25 +1,61 @@
 {
   "config": {
+    "error": {
+      "invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
+      "no_site": "No site provided",
+      "unknown_error": "[%key:common::config_flow::error::unknown%]"
+    },
     "step": {
+      "site": {
+        "data": {
+          "site_id": "Site NMI",
+          "site_name": "Site name"
+        },
+        "description": "Select the NMI of the site you would like to add"
+      },
       "user": {
         "data": {
           "api_token": "[%key:common::config_flow::data::api_token%]",
           "site_id": "Site ID"
         },
         "description": "Go to {api_url} to generate an API key"
-      },
-      "site": {
-        "data": {
-          "site_id": "Site NMI",
-          "site_name": "Site Name"
-        },
-        "description": "Select the NMI of the site you would like to add"
       }
     }
   },
-  "error": {
-    "invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
-    "no_site": "No site provided",
-    "unknown_error": "[%key:common::config_flow::error::unknown%]"
+  "services": {
+    "get_forecasts": {
+      "name": "Get price forecasts",
+      "description": "Retrieves price forecasts from Amber Electric for a site.",
+      "fields": {
+        "config_entry_id": {
+          "description": "The config entry of the site to get forecasts for.",
+          "name": "Config entry"
+        },
+        "channel_type": {
+          "name": "Channel type",
+          "description": "The channel to get forecasts for."
+        }
+      }
+    }
+  },
+  "exceptions": {
+    "integration_not_found": {
+      "message": "Config entry \"{target}\" not found in registry."
+    },
+    "not_loaded": {
+      "message": "{target} is not loaded."
+    },
+    "channel_not_found": {
+      "message": "There is no {channel_type} channel at this site."
+    }
+  },
+  "selector": {
+    "channel_type": {
+      "options": {
+        "general": "General",
+        "controlled_load": "Controlled load",
+        "feed_in": "Feed-in"
+      }
     }
   }
 }
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["amcrest"],
   "quality_scale": "legacy",
-  "requirements": ["amcrest==1.9.8"]
+  "requirements": ["amcrest==1.9.9"]
 }
@@ -68,6 +68,7 @@ ask_question:
       required: true
       selector:
         entity:
+          filter:
             domain: assist_satellite
             supported_features:
               - assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
@@ -147,9 +147,6 @@ class Attachment:
     media_content_id: str
     """Media content ID of the attachment."""

-    url: str
-    """URL of the attachment."""
-
     mime_type: str
     """MIME type of the attachment."""

@@ -25,7 +25,8 @@ PLATFORMS: list[Platform] = [Platform.TTS]

 async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None:
     """Get ElevenLabs model from their API by the model_id."""
-    models = await client.models.get_all()
+    models = await client.models.list()
+
     for maybe_model in models:
         if maybe_model.model_id == model_id:
             return maybe_model
@@ -23,14 +23,12 @@ from . import ElevenLabsConfigEntry
 from .const import (
     CONF_CONFIGURE_VOICE,
     CONF_MODEL,
-    CONF_OPTIMIZE_LATENCY,
     CONF_SIMILARITY,
     CONF_STABILITY,
     CONF_STYLE,
     CONF_USE_SPEAKER_BOOST,
     CONF_VOICE,
     DEFAULT_MODEL,
-    DEFAULT_OPTIMIZE_LATENCY,
     DEFAULT_SIMILARITY,
     DEFAULT_STABILITY,
     DEFAULT_STYLE,
@@ -51,7 +49,8 @@ async def get_voices_models(
     httpx_client = get_async_client(hass)
     client = AsyncElevenLabs(api_key=api_key, httpx_client=httpx_client)
     voices = (await client.voices.get_all()).voices
-    models = await client.models.get_all()
+    models = await client.models.list()
+
     voices_dict = {
         voice.voice_id: voice.name
         for voice in sorted(voices, key=lambda v: v.name or "")
@@ -78,7 +77,12 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
         if user_input is not None:
             try:
                 voices, _ = await get_voices_models(self.hass, user_input[CONF_API_KEY])
-            except ApiError:
+            except ApiError as exc:
+                errors["base"] = "unknown"
+                details = getattr(exc, "body", {}).get("detail", {})
+                if details:
+                    status = details.get("status")
+                    if status == "invalid_api_key":
                         errors["base"] = "invalid_api_key"
             else:
                 return self.async_create_entry(
@@ -206,12 +210,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
                     vol.Coerce(float),
                     vol.Range(min=0, max=1),
                 ),
-                vol.Optional(
-                    CONF_OPTIMIZE_LATENCY,
-                    default=self.config_entry.options.get(
-                        CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
-                    ),
-                ): vol.All(int, vol.Range(min=0, max=4)),
                 vol.Optional(
                     CONF_STYLE,
                     default=self.config_entry.options.get(CONF_STYLE, DEFAULT_STYLE),
@@ -7,7 +7,6 @@ CONF_MODEL = "model"
 CONF_CONFIGURE_VOICE = "configure_voice"
 CONF_STABILITY = "stability"
 CONF_SIMILARITY = "similarity"
-CONF_OPTIMIZE_LATENCY = "optimize_streaming_latency"
 CONF_STYLE = "style"
 CONF_USE_SPEAKER_BOOST = "use_speaker_boost"
 DOMAIN = "elevenlabs"
@@ -15,6 +14,5 @@ DOMAIN = "elevenlabs"
 DEFAULT_MODEL = "eleven_multilingual_v2"
 DEFAULT_STABILITY = 0.5
 DEFAULT_SIMILARITY = 0.75
-DEFAULT_OPTIMIZE_LATENCY = 0
 DEFAULT_STYLE = 0
 DEFAULT_USE_SPEAKER_BOOST = True
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["elevenlabs"],
-  "requirements": ["elevenlabs==1.9.0"]
+  "requirements": ["elevenlabs==2.3.0"]
 }
@@ -11,7 +11,8 @@
       }
     },
     "error": {
-      "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]"
+      "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
     }
   },
   "options": {
@@ -32,14 +33,12 @@
         "data": {
           "stability": "Stability",
           "similarity": "Similarity",
-          "optimize_streaming_latency": "Latency",
           "style": "Style",
           "use_speaker_boost": "Speaker boost"
         },
         "data_description": {
           "stability": "Stability of the generated audio. Higher values lead to less emotional audio.",
           "similarity": "Similarity of the generated audio to the original voice. Higher values may result in more similar audio, but may also introduce background noise.",
-          "optimize_streaming_latency": "Optimize the model for streaming. This may reduce the quality of the generated audio.",
           "style": "Style of the generated audio. Recommended to keep at 0 for most almost all use cases.",
           "use_speaker_boost": "Use speaker boost to increase the similarity of the generated audio to the original voice."
         }
@@ -25,13 +25,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from . import ElevenLabsConfigEntry
 from .const import (
     ATTR_MODEL,
-    CONF_OPTIMIZE_LATENCY,
     CONF_SIMILARITY,
     CONF_STABILITY,
     CONF_STYLE,
     CONF_USE_SPEAKER_BOOST,
     CONF_VOICE,
-    DEFAULT_OPTIMIZE_LATENCY,
     DEFAULT_SIMILARITY,
     DEFAULT_STABILITY,
     DEFAULT_STYLE,
@@ -75,9 +73,6 @@ async def async_setup_entry(
                 config_entry.entry_id,
                 config_entry.title,
                 voice_settings,
-                config_entry.options.get(
-                    CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
-                ),
             )
         ]
     )
@@ -98,7 +93,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
         entry_id: str,
         title: str,
         voice_settings: VoiceSettings,
-        latency: int = 0,
     ) -> None:
         """Init ElevenLabs TTS service."""
         self._client = client
@@ -115,7 +109,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
         if voice_indices:
             self._voices.insert(0, self._voices.pop(voice_indices[0]))
         self._voice_settings = voice_settings
-        self._latency = latency

         # Entity attributes
         self._attr_unique_id = entry_id
@@ -144,14 +137,14 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
         voice_id = options.get(ATTR_VOICE, self._default_voice_id)
         model = options.get(ATTR_MODEL, self._model.model_id)
         try:
-            audio = await self._client.generate(
+            audio = self._client.text_to_speech.convert(
                 text=message,
-                voice=voice_id,
-                optimize_streaming_latency=self._latency,
+                voice_id=voice_id,
                 voice_settings=self._voice_settings,
-                model=model,
+                model_id=model,
             )
             bytes_combined = b"".join([byte_seg async for byte_seg in audio])
+
         except ApiError as exc:
             _LOGGER.warning(
                 "Error during processing of TTS request %s", exc, exc_info=True
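Note (added for illustration; not part of the commit): a hedged sketch of the elevenlabs 2.x call shapes the migration above moves to (models.get_all() becomes models.list(); client.generate() becomes text_to_speech.convert() with voice_id/model_id keywords). The API key and voice ID are placeholders.

from elevenlabs.client import AsyncElevenLabs


async def synthesize(message: str) -> bytes:
    """Minimal sketch of the 2.x call pattern used in the TTS diff above."""
    client = AsyncElevenLabs(api_key="YOUR_API_KEY")  # placeholder key
    models = await client.models.list()  # replaces the removed client.models.get_all()
    model_id = next((m.model_id for m in models), "eleven_multilingual_v2")
    # convert() is not awaited; it returns an async iterator of audio chunks.
    audio = client.text_to_speech.convert(
        text=message,
        voice_id="YOUR_VOICE_ID",  # placeholder; the 1.x generate() call used voice=
        model_id=model_id,
    )
    return b"".join([chunk async for chunk in audio])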
@@ -26,6 +26,7 @@ from homeassistant.const import (
     EVENT_THEMES_UPDATED,
 )
 from homeassistant.core import HomeAssistant, ServiceCall, callback
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, service
 from homeassistant.helpers.icon import async_get_icons
 from homeassistant.helpers.json import json_dumps_sorted
@@ -543,6 +544,12 @@ async def _async_setup_themes(
         """Reload themes."""
         config = await async_hass_config_yaml(hass)
         new_themes = config.get(DOMAIN, {}).get(CONF_THEMES, {})
+
+        try:
+            THEME_SCHEMA(new_themes)
+        except vol.Invalid as err:
+            raise HomeAssistantError(f"Failed to reload themes: {err}") from err
+
         hass.data[DATA_THEMES] = new_themes
         if hass.data[DATA_DEFAULT_THEME] not in new_themes:
             hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
@@ -306,6 +306,11 @@ class WebRTCProvider(CameraWebRTCProvider):
             await self.teardown()
             raise HomeAssistantError("Camera has no stream source")

+        if camera.platform.platform_name == "generic":
+            # This is a workaround to use ffmpeg for generic cameras
+            # A proper fix will be added in the future together with supporting multiple streams per camera
+            stream_source = "ffmpeg:" + stream_source
+
         if not self.async_is_supported(stream_source):
             await self.teardown()
             raise HomeAssistantError("Stream source is not supported by go2rtc")
@@ -113,9 +113,7 @@ class HomematicipHAP:

         self._ws_close_requested = False
         self._ws_connection_closed = asyncio.Event()
-        self._retry_task: asyncio.Task | None = None
-        self._tries = 0
-        self._accesspoint_connected = True
+        self._get_state_task: asyncio.Task | None = None
         self.hmip_device_by_entity_id: dict[str, Any] = {}
         self.reset_connection_listener: Callable | None = None

@@ -161,17 +159,8 @@ class HomematicipHAP:
         """
         if not self.home.connected:
             _LOGGER.error("HMIP access point has lost connection with the cloud")
-            self._accesspoint_connected = False
+            self._ws_connection_closed.set()
             self.set_all_to_unavailable()
-        elif not self._accesspoint_connected:
-            # Now the HOME_CHANGED event has fired indicating the access
-            # point has reconnected to the cloud again.
-            # Explicitly getting an update as entity states might have
-            # changed during access point disconnect."""
-
-            job = self.hass.async_create_task(self.get_state())
-            job.add_done_callback(self.get_state_finished)
-            self._accesspoint_connected = True

     @callback
     def async_create_entity(self, *args, **kwargs) -> None:
@@ -185,20 +174,43 @@ class HomematicipHAP:
             await asyncio.sleep(30)
             await self.hass.config_entries.async_reload(self.config_entry.entry_id)

+    async def _try_get_state(self) -> None:
+        """Call get_state in a loop until no error occurs, using exponential backoff on error."""
+
+        # Wait until WebSocket connection is established.
+        while not self.home.websocket_is_connected():
+            await asyncio.sleep(2)
+
+        delay = 8
+        max_delay = 1500
+        while True:
+            try:
+                await self.get_state()
+                break
+            except HmipConnectionError as err:
+                _LOGGER.warning(
+                    "Get_state failed, retrying in %s seconds: %s", delay, err
+                )
+                await asyncio.sleep(delay)
+                delay = min(delay * 2, max_delay)
+
     async def get_state(self) -> None:
         """Update HMIP state and tell Home Assistant."""
         await self.home.get_current_state_async()
         self.update_all()

     def get_state_finished(self, future) -> None:
-        """Execute when get_state coroutine has finished."""
+        """Execute when try_get_state coroutine has finished."""
         try:
             future.result()
-        except HmipConnectionError:
-            # Somehow connection could not recover. Will disconnect and
-            # so reconnect loop is taking over.
-            _LOGGER.error("Updating state after HMIP access point reconnect failed")
-            self.hass.async_create_task(self.home.disable_events())
+        except Exception as err:  # noqa: BLE001
+            _LOGGER.error(
+                "Error updating state after HMIP access point reconnect: %s", err
+            )
+        else:
+            _LOGGER.info(
+                "Updating state after HMIP access point reconnect finished successfully",
+            )

     def set_all_to_unavailable(self) -> None:
         """Set all devices to unavailable and tell Home Assistant."""
@@ -222,8 +234,8 @@ class HomematicipHAP:
     async def async_reset(self) -> bool:
         """Close the websocket connection."""
         self._ws_close_requested = True
-        if self._retry_task is not None:
-            self._retry_task.cancel()
+        if self._get_state_task is not None:
+            self._get_state_task.cancel()
         await self.home.disable_events_async()
         _LOGGER.debug("Closed connection to HomematicIP cloud server")
         await self.hass.config_entries.async_unload_platforms(
@@ -247,7 +259,9 @@ class HomematicipHAP:
         """Handle websocket connected."""
         _LOGGER.info("Websocket connection to HomematicIP Cloud established")
         if self._ws_connection_closed.is_set():
-            await self.get_state()
+            self._get_state_task = self.hass.async_create_task(self._try_get_state())
+            self._get_state_task.add_done_callback(self.get_state_finished)
+
         self._ws_connection_closed.clear()

     async def ws_disconnected_handler(self) -> None:
@@ -256,11 +270,12 @@ class HomematicipHAP:
         self._ws_connection_closed.set()

     async def ws_reconnected_handler(self, reason: str) -> None:
-        """Handle websocket reconnection."""
+        """Handle websocket reconnection. Is called when Websocket tries to reconnect."""
         _LOGGER.info(
-            "Websocket connection to HomematicIP Cloud re-established due to reason: %s",
+            "Websocket connection to HomematicIP Cloud trying to reconnect due to reason: %s",
             reason,
         )

+        self._ws_connection_closed.set()
+
 async def get_hap(
@@ -2,13 +2,20 @@

 from __future__ import annotations

+import logging
 from typing import Any

-from homematicip.base.enums import DeviceType, OpticalSignalBehaviour, RGBColorState
+from homematicip.base.enums import (
+    DeviceType,
+    FunctionalChannelType,
+    OpticalSignalBehaviour,
+    RGBColorState,
+)
 from homematicip.base.functionalChannels import NotificationLightChannel
 from homematicip.device import (
     BrandDimmer,
     BrandSwitchNotificationLight,
+    Device,
     Dimmer,
     DinRailDimmer3,
     FullFlushDimmer,
@@ -34,6 +41,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .entity import HomematicipGenericEntity
 from .hap import HomematicIPConfigEntry, HomematicipHAP

+_logger = logging.getLogger(__name__)
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -43,6 +52,14 @@ async def async_setup_entry(
     """Set up the HomematicIP Cloud lights from a config entry."""
     hap = config_entry.runtime_data
     entities: list[HomematicipGenericEntity] = []
+
+    entities.extend(
+        HomematicipLightHS(hap, d, ch.index)
+        for d in hap.home.devices
+        for ch in d.functionalChannels
+        if ch.functionalChannelType == FunctionalChannelType.UNIVERSAL_LIGHT_CHANNEL
+    )
+
     for device in hap.home.devices:
         if (
             isinstance(device, SwitchMeasuring)
@@ -104,6 +121,64 @@ class HomematicipLight(HomematicipGenericEntity, LightEntity):
         await self._device.turn_off_async()


+class HomematicipLightHS(HomematicipGenericEntity, LightEntity):
+    """Representation of the HomematicIP light with HS color mode."""
+
+    _attr_color_mode = ColorMode.HS
+    _attr_supported_color_modes = {ColorMode.HS}
+
+    def __init__(self, hap: HomematicipHAP, device: Device, channel_index: int) -> None:
+        """Initialize the light entity."""
+        super().__init__(hap, device, channel=channel_index, is_multi_channel=True)
+
+    @property
+    def is_on(self) -> bool:
+        """Return true if light is on."""
+        return self.functional_channel.on
+
+    @property
+    def brightness(self) -> int | None:
+        """Return the current brightness."""
+        return int(self.functional_channel.dimLevel * 255.0)
+
+    @property
+    def hs_color(self) -> tuple[float, float] | None:
+        """Return the hue and saturation color value [float, float]."""
+        if (
+            self.functional_channel.hue is None
+            or self.functional_channel.saturationLevel is None
+        ):
+            return None
+        return (
+            self.functional_channel.hue,
+            self.functional_channel.saturationLevel * 100.0,
+        )
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the light on."""
+
+        hs_color = kwargs.get(ATTR_HS_COLOR, (0.0, 0.0))
+        hue = hs_color[0] % 360.0
+        saturation = hs_color[1] / 100.0
+        dim_level = round(kwargs.get(ATTR_BRIGHTNESS, 255) / 255.0, 2)
+
+        if ATTR_HS_COLOR not in kwargs:
+            hue = self.functional_channel.hue
+            saturation = self.functional_channel.saturationLevel
+
+        if ATTR_BRIGHTNESS not in kwargs:
+            # If no brightness is set, use the current brightness
+            dim_level = self.functional_channel.dimLevel or 1.0
+
+        await self.functional_channel.set_hue_saturation_dim_level_async(
+            hue=hue, saturation_level=saturation, dim_level=dim_level
+        )
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the light off."""
+        await self.functional_channel.set_switch_state_async(on=False)
+
+
 class HomematicipLightMeasuring(HomematicipLight):
     """Representation of the HomematicIP measuring light."""

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
   "iot_class": "cloud_push",
   "loggers": ["homematicip"],
-  "requirements": ["homematicip==2.0.6"]
+  "requirements": ["homematicip==2.0.7"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/imgw_pib",
   "iot_class": "cloud_polling",
   "quality_scale": "silver",
-  "requirements": ["imgw_pib==1.2.0"]
+  "requirements": ["imgw_pib==1.4.0"]
 }
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import logging
 from typing import Any

 from homeassistant.components.media_player import (
@@ -21,6 +22,8 @@ from .const import CONTENT_TYPE_MAP, LOGGER, MAX_IMAGE_WIDTH
 from .coordinator import JellyfinConfigEntry, JellyfinDataUpdateCoordinator
 from .entity import JellyfinClientEntity

+_LOGGER = logging.getLogger(__name__)
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -177,10 +180,15 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
     def supported_features(self) -> MediaPlayerEntityFeature:
         """Flag media player features that are supported."""
         commands: list[str] = self.capabilities.get("SupportedCommands", [])
-        controllable = self.capabilities.get("SupportsMediaControl", False)
+        _LOGGER.debug(
+            "Supported commands for device %s, client %s, %s",
+            self.device_name,
+            self.client_name,
+            commands,
+        )
         features = MediaPlayerEntityFeature(0)

-        if controllable:
+        if "PlayMediaSource" in commands:
             features |= (
                 MediaPlayerEntityFeature.BROWSE_MEDIA
                 | MediaPlayerEntityFeature.PLAY_MEDIA
@@ -2,6 +2,9 @@
   "entity_component": {
     "_": {
       "default": "mdi:lightbulb",
+      "state": {
+        "off": "mdi:lightbulb-off"
+      },
       "state_attributes": {
         "effect": {
           "default": "mdi:circle-medium",
@@ -16,6 +16,11 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):

     _attr_has_entity_name = True

+    @staticmethod
+    def get_unique_id(device_id: str, description: EntityDescription) -> str:
+        """Generate a unique ID for the entity."""
+        return f"{device_id}-{description.key}"
+
     def __init__(
         self,
         coordinator: MieleDataUpdateCoordinator,
@@ -26,7 +31,7 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
         super().__init__(coordinator)
         self._device_id = device_id
         self.entity_description = description
-        self._attr_unique_id = f"{device_id}-{description.key}"
+        self._attr_unique_id = MieleEntity.get_unique_id(device_id, description)

         device = self.device
         appliance_type = DEVICE_TYPE_TAGS.get(MieleAppliance(device.device_type))
@@ -7,7 +7,7 @@ from dataclasses import dataclass
 import logging
 from typing import Final, cast
 
-from pymiele import MieleDevice
+from pymiele import MieleDevice, MieleTemperature
 
 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -25,10 +25,13 @@ from homeassistant.const import (
     UnitOfVolume,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 
 from .const import (
+    DISABLED_TEMP_ENTITIES,
+    DOMAIN,
     STATE_PROGRAM_ID,
     STATE_PROGRAM_PHASE,
     STATE_STATUS_TAGS,
@@ -45,8 +48,6 @@ PARALLEL_UPDATES = 0
 
 _LOGGER = logging.getLogger(__name__)
 
-DISABLED_TEMPERATURE = -32768
-
 DEFAULT_PLATE_COUNT = 4
 
 PLATE_COUNT = {
@@ -75,12 +76,25 @@ def _convert_duration(value_list: list[int]) -> int | None:
     return value_list[0] * 60 + value_list[1] if value_list else None
 
 
+def _convert_temperature(
+    value_list: list[MieleTemperature], index: int
+) -> float | None:
+    """Convert temperature object to readable value."""
+    if index >= len(value_list):
+        return None
+    raw_value = cast(int, value_list[index].temperature) / 100.0
+    if raw_value in DISABLED_TEMP_ENTITIES:
+        return None
+    return raw_value
+
+
 @dataclass(frozen=True, kw_only=True)
 class MieleSensorDescription(SensorEntityDescription):
     """Class describing Miele sensor entities."""
 
     value_fn: Callable[[MieleDevice], StateType]
-    zone: int = 1
+    zone: int | None = None
+    unique_id_fn: Callable[[str, MieleSensorDescription], str] | None = None
 
 
 @dataclass
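An illustrative sketch (not part of the commit) of how `_convert_temperature` is expected to behave. `_Temp` is a hypothetical stand-in for pymiele's `MieleTemperature`, and the disabled sentinel is taken from the removed `DISABLED_TEMPERATURE = -32768` constant (-327.68 after scaling); the actual contents of `DISABLED_TEMP_ENTITIES` are assumed.

from dataclasses import dataclass

@dataclass
class _Temp:
    temperature: int  # hundredths of a degree, as reported by the API

def _convert(values: list[_Temp], index: int) -> float | None:
    # Mirrors _convert_temperature: out-of-range zones and the API's
    # "disabled" sentinel both map to None instead of a bogus reading.
    if index >= len(values):
        return None
    raw = values[index].temperature / 100.0
    return None if raw == -327.68 else raw

assert _convert([_Temp(2150), _Temp(-32768)], 0) == 21.5
assert _convert([_Temp(2150), _Temp(-32768)], 1) is None   # disabled zone
assert _convert([_Temp(2150)], 2) is None                  # zone not reported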
@@ -404,32 +418,20 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
         ),
         description=MieleSensorDescription(
             key="state_temperature_1",
+            zone=1,
             device_class=SensorDeviceClass.TEMPERATURE,
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             state_class=SensorStateClass.MEASUREMENT,
-            value_fn=lambda value: cast(int, value.state_temperatures[0].temperature)
-            / 100.0,
+            value_fn=lambda value: _convert_temperature(value.state_temperatures, 0),
         ),
     ),
     MieleSensorDefinition(
         types=(
-            MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
-            MieleAppliance.OVEN,
-            MieleAppliance.OVEN_MICROWAVE,
-            MieleAppliance.DISH_WARMER,
-            MieleAppliance.STEAM_OVEN,
-            MieleAppliance.MICROWAVE,
-            MieleAppliance.FRIDGE,
-            MieleAppliance.FREEZER,
             MieleAppliance.FRIDGE_FREEZER,
-            MieleAppliance.STEAM_OVEN_COMBI,
             MieleAppliance.WINE_CABINET,
             MieleAppliance.WINE_CONDITIONING_UNIT,
             MieleAppliance.WINE_STORAGE_CONDITIONING_UNIT,
-            MieleAppliance.STEAM_OVEN_MICRO,
-            MieleAppliance.DIALOG_OVEN,
             MieleAppliance.WINE_CABINET_FREEZER,
-            MieleAppliance.STEAM_OVEN_MK2,
         ),
         description=MieleSensorDescription(
             key="state_temperature_2",
@@ -438,7 +440,24 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
             translation_key="temperature_zone_2",
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             state_class=SensorStateClass.MEASUREMENT,
-            value_fn=lambda value: value.state_temperatures[1].temperature / 100.0,  # type: ignore [operator]
+            value_fn=lambda value: _convert_temperature(value.state_temperatures, 1),
+        ),
+    ),
+    MieleSensorDefinition(
+        types=(
+            MieleAppliance.WINE_CABINET,
+            MieleAppliance.WINE_CONDITIONING_UNIT,
+            MieleAppliance.WINE_STORAGE_CONDITIONING_UNIT,
+            MieleAppliance.WINE_CABINET_FREEZER,
+        ),
+        description=MieleSensorDescription(
+            key="state_temperature_3",
+            zone=3,
+            device_class=SensorDeviceClass.TEMPERATURE,
+            translation_key="temperature_zone_3",
+            native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+            state_class=SensorStateClass.MEASUREMENT,
+            value_fn=lambda value: _convert_temperature(value.state_temperatures, 2),
         ),
     ),
     MieleSensorDefinition(
@@ -454,11 +473,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
             device_class=SensorDeviceClass.TEMPERATURE,
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             state_class=SensorStateClass.MEASUREMENT,
-            value_fn=(
-                lambda value: cast(
-                    int, value.state_core_target_temperature[0].temperature
-                )
-                / 100.0
+            value_fn=lambda value: _convert_temperature(
+                value.state_core_target_temperature, 0
             ),
         ),
     ),
@@ -479,9 +495,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
             device_class=SensorDeviceClass.TEMPERATURE,
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             state_class=SensorStateClass.MEASUREMENT,
-            value_fn=(
-                lambda value: cast(int, value.state_target_temperature[0].temperature)
-                / 100.0
+            value_fn=lambda value: _convert_temperature(
+                value.state_target_temperature, 0
             ),
         ),
     ),
@@ -497,9 +512,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
             device_class=SensorDeviceClass.TEMPERATURE,
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             state_class=SensorStateClass.MEASUREMENT,
-            value_fn=(
-                lambda value: cast(int, value.state_core_temperature[0].temperature)
-                / 100.0
+            value_fn=lambda value: _convert_temperature(
+                value.state_core_temperature, 0
             ),
         ),
     ),
@@ -518,6 +532,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
                 device_class=SensorDeviceClass.ENUM,
                 options=sorted(PlatePowerStep.keys()),
                 value_fn=lambda value: None,
+                unique_id_fn=lambda device_id,
+                description: f"{device_id}-{description.key}-{description.zone}",
             ),
         )
         for i in range(1, 7)
@@ -539,6 +555,16 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
             options=sorted(StateDryingStep.keys()),
         ),
     ),
+    MieleSensorDefinition(
+        types=(MieleAppliance.ROBOT_VACUUM_CLEANER,),
+        description=MieleSensorDescription(
+            key="state_battery",
+            value_fn=lambda value: value.state_battery_level,
+            native_unit_of_measurement=PERCENTAGE,
+            entity_category=EntityCategory.DIAGNOSTIC,
+            device_class=SensorDeviceClass.BATTERY,
+        ),
+    ),
 )
 
 
@@ -549,10 +575,52 @@ async def async_setup_entry(
 ) -> None:
     """Set up the sensor platform."""
     coordinator = config_entry.runtime_data
-    added_devices: set[str] = set()
+    added_devices: set[str] = set()  # device_id
+    added_entities: set[str] = set()  # unique_id
+
+    def _get_entity_class(definition: MieleSensorDefinition) -> type[MieleSensor]:
+        """Get the entity class for the sensor."""
+        return {
+            "state_status": MieleStatusSensor,
+            "state_program_id": MieleProgramIdSensor,
+            "state_program_phase": MielePhaseSensor,
+            "state_plate_step": MielePlateSensor,
+        }.get(definition.description.key, MieleSensor)
+
+    def _is_entity_registered(unique_id: str) -> bool:
+        """Check if the entity is already registered."""
+        entity_registry = er.async_get(hass)
+        return any(
+            entry.platform == DOMAIN and entry.unique_id == unique_id
+            for entry in entity_registry.entities.values()
+        )
+
+    def _is_sensor_enabled(
+        definition: MieleSensorDefinition,
+        device: MieleDevice,
+        unique_id: str,
+    ) -> bool:
+        """Check if the sensor is enabled."""
+        if (
+            definition.description.device_class == SensorDeviceClass.TEMPERATURE
+            and definition.description.value_fn(device) is None
+            and definition.description.zone != 1
+        ):
+            # all appliances supporting temperature have at least zone 1, for other zones
+            # don't create entity if API signals that datapoint is disabled, unless the sensor
+            # already appeared in the past (= it provided a valid value)
+            return _is_entity_registered(unique_id)
+        if (
+            definition.description.key == "state_plate_step"
+            and definition.description.zone is not None
+            and definition.description.zone > _get_plate_count(device.tech_type)
+        ):
+            # don't create plate entity if not expected by the appliance tech type
+            return False
+        return True
 
-    def _async_add_new_devices() -> None:
-        nonlocal added_devices
+    def _async_add_devices() -> None:
+        nonlocal added_devices, added_entities
         entities: list = []
         entity_class: type[MieleSensor]
         new_devices_set, current_devices = coordinator.async_add_devices(added_devices)
@@ -560,40 +628,35 @@
 
         for device_id, device in coordinator.data.devices.items():
             for definition in SENSOR_TYPES:
-                if (
-                    device_id in new_devices_set
-                    and device.device_type in definition.types
-                ):
-                    match definition.description.key:
-                        case "state_status":
-                            entity_class = MieleStatusSensor
-                        case "state_program_id":
-                            entity_class = MieleProgramIdSensor
-                        case "state_program_phase":
-                            entity_class = MielePhaseSensor
-                        case "state_plate_step":
-                            entity_class = MielePlateSensor
-                        case _:
-                            entity_class = MieleSensor
-                    if (
-                        definition.description.device_class
-                        == SensorDeviceClass.TEMPERATURE
-                        and definition.description.value_fn(device)
-                        == DISABLED_TEMPERATURE / 100
-                    ) or (
-                        definition.description.key == "state_plate_step"
-                        and definition.description.zone
-                        > _get_plate_count(device.tech_type)
-                    ):
-                        # Don't create entity if API signals that datapoint is disabled
-                        continue
+                # device is not supported, skip
+                if device.device_type not in definition.types:
+                    continue
+
+                entity_class = _get_entity_class(definition)
+                unique_id = (
+                    definition.description.unique_id_fn(
+                        device_id, definition.description
+                    )
+                    if definition.description.unique_id_fn is not None
+                    else MieleEntity.get_unique_id(device_id, definition.description)
+                )
+
+                # entity was already added, skip
+                if device_id not in new_devices_set and unique_id in added_entities:
+                    continue
+
+                # sensors is not enabled, skip
+                if not _is_sensor_enabled(definition, device, unique_id):
+                    continue
+
+                added_entities.add(unique_id)
                 entities.append(
                     entity_class(coordinator, device_id, definition.description)
                 )
         async_add_entities(entities)
 
-    config_entry.async_on_unload(coordinator.async_add_listener(_async_add_new_devices))
-    _async_add_new_devices()
+    config_entry.async_on_unload(coordinator.async_add_listener(_async_add_devices))
+    _async_add_devices()
 
 
 APPLIANCE_ICONS = {
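A compact sketch (not part of the commit) of the unique-ID fallback the rewritten setup path applies: a per-description override wins, otherwise the shared `{device_id}-{key}` scheme is used. `_FakeDescription` is a hypothetical stand-in holding only the fields this logic reads.

from dataclasses import dataclass
from typing import Callable

@dataclass
class _FakeDescription:
    key: str
    zone: int | None = None
    unique_id_fn: Callable[[str, "_FakeDescription"], str] | None = None

def resolve_unique_id(device_id: str, description: _FakeDescription) -> str:
    # Mirrors the fallback in _async_add_devices above.
    if description.unique_id_fn is not None:
        return description.unique_id_fn(device_id, description)
    return f"{device_id}-{description.key}"

plate = _FakeDescription(
    key="state_plate_step",
    zone=2,
    unique_id_fn=lambda device_id, description: f"{device_id}-{description.key}-{description.zone}",
)
assert resolve_unique_id("abc", plate) == "abc-state_plate_step-2"
assert resolve_unique_id("abc", _FakeDescription(key="state_status")) == "abc-state_status"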
@@ -631,6 +694,17 @@ class MieleSensor(MieleEntity, SensorEntity):
 
     entity_description: MieleSensorDescription
 
+    def __init__(
+        self,
+        coordinator: MieleDataUpdateCoordinator,
+        device_id: str,
+        description: MieleSensorDescription,
+    ) -> None:
+        """Initialize the sensor."""
+        super().__init__(coordinator, device_id, description)
+        if description.unique_id_fn is not None:
+            self._attr_unique_id = description.unique_id_fn(device_id, description)
+
     @property
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
@@ -642,16 +716,6 @@ class MielePlateSensor(MieleSensor):
 
     entity_description: MieleSensorDescription
 
-    def __init__(
-        self,
-        coordinator: MieleDataUpdateCoordinator,
-        device_id: str,
-        description: MieleSensorDescription,
-    ) -> None:
-        """Initialize the plate sensor."""
-        super().__init__(coordinator, device_id, description)
-        self._attr_unique_id = f"{device_id}-{description.key}-{description.zone}"
-
     @property
     def native_value(self) -> StateType:
         """Return the state of the plate sensor."""
@@ -662,7 +726,7 @@ class MielePlateSensor(MieleSensor):
             cast(
                 int,
                 self.device.state_plate_step[
-                    self.entity_description.zone - 1
+                    cast(int, self.entity_description.zone) - 1
                 ].value_raw,
             )
         ).name
@@ -87,7 +87,6 @@ class MieleVacuumStateCode(MieleEnum):
 
 SUPPORTED_FEATURES = (
     VacuumEntityFeature.STATE
-    | VacuumEntityFeature.BATTERY
     | VacuumEntityFeature.FAN_SPEED
     | VacuumEntityFeature.START
     | VacuumEntityFeature.STOP
@@ -174,11 +173,6 @@ class MieleVacuum(MieleEntity, StateVacuumEntity):
             MieleVacuumStateCode(self.device.state_program_phase).value
         )
 
-    @property
-    def battery_level(self) -> int | None:
-        """Return the battery level."""
-        return self.device.state_battery_level
-
     @property
     def fan_speed(self) -> str | None:
         """Return the fan speed."""
@@ -19,7 +19,7 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
 from .coordinator import NASwebCoordinator
 from .nasweb_data import NASwebData
 
-PLATFORMS: list[Platform] = [Platform.SWITCH]
+PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
 
 NASWEB_CONFIG_URL = "https://{host}/page"
 
@@ -1,6 +1,7 @@
 """Constants for the NASweb integration."""
 
 DOMAIN = "nasweb"
+KEY_TEMP_SENSOR = "temp_sensor"
 MANUFACTURER = "chomtech.pl"
 STATUS_UPDATE_MAX_TIME_INTERVAL = 60
 SUPPORT_EMAIL = "support@chomtech.eu"
@@ -11,16 +11,19 @@ from typing import Any
 
 from aiohttp.web import Request, Response
 from webio_api import WebioAPI
-from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE
+from webio_api.const import KEY_DEVICE_SERIAL, KEY_TYPE, TYPE_STATUS_UPDATE
 
 from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
 from homeassistant.helpers import event
 from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol
 
-from .const import STATUS_UPDATE_MAX_TIME_INTERVAL
+from .const import KEY_TEMP_SENSOR, STATUS_UPDATE_MAX_TIME_INTERVAL
 
 _LOGGER = logging.getLogger(__name__)
 
+KEY_INPUTS = "inputs"
+KEY_OUTPUTS = "outputs"
+
 
 class NotificationCoordinator:
     """Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator."""
@@ -96,8 +99,11 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
         self._job = HassJob(self._handle_max_update_interval, job_name)
         self._unsub_last_update_check: CALLBACK_TYPE | None = None
         self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
-        data: dict[str, Any] = {}
-        data[KEY_OUTPUTS] = self.webio_api.outputs
+        data: dict[str, Any] = {
+            KEY_OUTPUTS: self.webio_api.outputs,
+            KEY_INPUTS: self.webio_api.inputs,
+            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
+        }
         self.async_set_updated_data(data)
 
     def is_connection_confirmed(self) -> bool:
@@ -187,5 +193,9 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
     async def process_status_update(self, new_status: dict) -> None:
         """Process status update from NASweb."""
         self.webio_api.update_device_status(new_status)
-        new_data = {KEY_OUTPUTS: self.webio_api.outputs}
+        new_data = {
+            KEY_OUTPUTS: self.webio_api.outputs,
+            KEY_INPUTS: self.webio_api.inputs,
+            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
+        }
         self.async_set_updated_data(new_data)
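A minimal sketch (not part of the commit) of how a platform can unpack the coordinator snapshot after this change; the key strings mirror KEY_OUTPUTS, KEY_INPUTS and KEY_TEMP_SENSOR from the hunks above, and the helper itself is hypothetical.

def unpack_snapshot(data: dict) -> tuple[list, list, object | None]:
    # Pull out the three sections the coordinator now publishes on every update.
    return (
        data.get("outputs", []),
        data.get("inputs", []),
        data.get("temp_sensor"),
    )

outputs, inputs, temp_sensor = unpack_snapshot({"outputs": [], "inputs": [], "temp_sensor": None})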
homeassistant/components/nasweb/icons.json (new file, +15 lines)
@@ -0,0 +1,15 @@
+{
+  "entity": {
+    "sensor": {
+      "sensor_input": {
+        "default": "mdi:help-circle-outline",
+        "state": {
+          "tamper": "mdi:lock-alert",
+          "active": "mdi:alert",
+          "normal": "mdi:shield-check-outline",
+          "problem": "mdi:alert-circle"
+        }
+      }
+    }
+  }
+}
homeassistant/components/nasweb/sensor.py (new file, +189 lines)
@@ -0,0 +1,189 @@
+"""Platform for NASweb sensors."""
+
+from __future__ import annotations
+
+import logging
+import time
+
+from webio_api import Input as NASwebInput, TempSensor
+
+from homeassistant.components.sensor import (
+    DOMAIN as DOMAIN_SENSOR,
+    SensorDeviceClass,
+    SensorEntity,
+    SensorStateClass,
+    UnitOfTemperature,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+import homeassistant.helpers.entity_registry as er
+from homeassistant.helpers.typing import DiscoveryInfoType
+from homeassistant.helpers.update_coordinator import (
+    BaseCoordinatorEntity,
+    BaseDataUpdateCoordinatorProtocol,
+)
+
+from . import NASwebConfigEntry
+from .const import DOMAIN, KEY_TEMP_SENSOR, STATUS_UPDATE_MAX_TIME_INTERVAL
+
+SENSOR_INPUT_TRANSLATION_KEY = "sensor_input"
+STATE_UNDEFINED = "undefined"
+STATE_TAMPER = "tamper"
+STATE_ACTIVE = "active"
+STATE_NORMAL = "normal"
+STATE_PROBLEM = "problem"
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config: NASwebConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+    discovery_info: DiscoveryInfoType | None = None,
+) -> None:
+    """Set up Sensor platform."""
+    coordinator = config.runtime_data
+    current_inputs: set[int] = set()
+
+    @callback
+    def _check_entities() -> None:
+        received_inputs: dict[int, NASwebInput] = {
+            entry.index: entry for entry in coordinator.webio_api.inputs
+        }
+        added = {i for i in received_inputs if i not in current_inputs}
+        removed = {i for i in current_inputs if i not in received_inputs}
+        entities_to_add: list[InputStateSensor] = []
+        for index in added:
+            webio_input = received_inputs[index]
+            if not isinstance(webio_input, NASwebInput):
+                _LOGGER.error("Cannot create InputStateSensor without NASwebInput")
+                continue
+            new_input = InputStateSensor(coordinator, webio_input)
+            entities_to_add.append(new_input)
+            current_inputs.add(index)
+        async_add_entities(entities_to_add)
+        entity_registry = er.async_get(hass)
+        for index in removed:
+            unique_id = f"{DOMAIN}.{config.unique_id}.input.{index}"
+            if entity_id := entity_registry.async_get_entity_id(
+                DOMAIN_SENSOR, DOMAIN, unique_id
+            ):
+                entity_registry.async_remove(entity_id)
+                current_inputs.remove(index)
+            else:
+                _LOGGER.warning("Failed to remove old input: no entity_id")
+
+    coordinator.async_add_listener(_check_entities)
+    _check_entities()
+
+    nasweb_temp_sensor = coordinator.data[KEY_TEMP_SENSOR]
+    temp_sensor = TemperatureSensor(coordinator, nasweb_temp_sensor)
+    async_add_entities([temp_sensor])
+
+
+class BaseSensorEntity(SensorEntity, BaseCoordinatorEntity):
+    """Base class providing common functionality."""
+
+    def __init__(self, coordinator: BaseDataUpdateCoordinatorProtocol) -> None:
+        """Initialize base sensor."""
+        super().__init__(coordinator)
+        self._attr_available = False
+        self._attr_has_entity_name = True
+        self._attr_should_poll = False
+
+    async def async_added_to_hass(self) -> None:
+        """When entity is added to hass."""
+        await super().async_added_to_hass()
+        self._handle_coordinator_update()
+
+    def _set_attr_available(
+        self, entity_last_update: float, available: bool | None
+    ) -> None:
+        if (
+            self.coordinator.last_update is None
+            or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
+        ):
+            self._attr_available = False
+        else:
+            self._attr_available = available if available is not None else False
+
+    async def async_update(self) -> None:
+        """Update the entity.
+
+        Only used by the generic entity update service.
+        Scheduling updates is not necessary, the coordinator takes care of updates via push notifications.
+        """
+
+
+class InputStateSensor(BaseSensorEntity):
+    """Entity representing NASweb input."""
+
+    _attr_device_class = SensorDeviceClass.ENUM
+    _attr_options: list[str] = [
+        STATE_UNDEFINED,
+        STATE_TAMPER,
+        STATE_ACTIVE,
+        STATE_NORMAL,
+        STATE_PROBLEM,
+    ]
+    _attr_translation_key = SENSOR_INPUT_TRANSLATION_KEY
+
+    def __init__(
+        self,
+        coordinator: BaseDataUpdateCoordinatorProtocol,
+        nasweb_input: NASwebInput,
+    ) -> None:
+        """Initialize InputStateSensor entity."""
+        super().__init__(coordinator)
+        self._input = nasweb_input
+        self._attr_native_value: str | None = None
+        self._attr_translation_placeholders = {"index": f"{nasweb_input.index:2d}"}
+        self._attr_unique_id = (
+            f"{DOMAIN}.{self._input.webio_serial}.input.{self._input.index}"
+        )
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, self._input.webio_serial)},
+        )
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        if self._input.state is None or self._input.state in self._attr_options:
+            self._attr_native_value = self._input.state
+        else:
+            _LOGGER.warning("Received unrecognized input state: %s", self._input.state)
+            self._attr_native_value = None
+        self._set_attr_available(self._input.last_update, self._input.available)
+        self.async_write_ha_state()
+
+
+class TemperatureSensor(BaseSensorEntity):
+    """Entity representing NASweb temperature sensor."""
+
+    _attr_device_class = SensorDeviceClass.TEMPERATURE
+    _attr_state_class = SensorStateClass.MEASUREMENT
+    _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
+
+    def __init__(
+        self,
+        coordinator: BaseDataUpdateCoordinatorProtocol,
+        nasweb_temp_sensor: TempSensor,
+    ) -> None:
+        """Initialize TemperatureSensor entity."""
+        super().__init__(coordinator)
+        self._temp_sensor = nasweb_temp_sensor
+        self._attr_unique_id = f"{DOMAIN}.{self._temp_sensor.webio_serial}.temp_sensor"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, self._temp_sensor.webio_serial)}
+        )
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        self._attr_native_value = self._temp_sensor.value
+        self._set_attr_available(
+            self._temp_sensor.last_update, self._temp_sensor.available
+        )
+        self.async_write_ha_state()
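A small sketch (not part of the commit) of the time-based part of the staleness rule `_set_attr_available` applies, with hypothetical timestamps; it omits the separate `coordinator.last_update is None` check.

import time

STATUS_UPDATE_MAX_TIME_INTERVAL = 60  # seconds, mirrors the constant used above

def is_stale(entity_last_update: float, now: float | None = None) -> bool:
    # An entity goes unavailable once its last update is older than the
    # maximum interval between push notifications.
    now = time.time() if now is None else now
    return now - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL

assert is_stale(entity_last_update=0.0, now=61.0)
assert not is_stale(entity_last_update=30.0, now=61.0)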
@@ -45,6 +45,18 @@
       "switch_output": {
         "name": "Relay Switch {index}"
       }
+    },
+    "sensor": {
+      "sensor_input": {
+        "name": "Input {index}",
+        "state": {
+          "undefined": "Undefined",
+          "tamper": "Tamper",
+          "active": "Active",
+          "normal": "Normal",
+          "problem": "Problem"
+        }
+      }
     }
   }
 }
@@ -38,6 +38,7 @@ from .const import (
     ATTR_HEATING_POWER_REQUEST,
     ATTR_SCHEDULE_NAME,
     ATTR_SELECTED_SCHEDULE,
+    ATTR_SELECTED_SCHEDULE_ID,
     ATTR_TARGET_TEMPERATURE,
     ATTR_TIME_PERIOD,
     DATA_SCHEDULES,
@@ -251,16 +252,22 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
         if data["event_type"] == EVENT_TYPE_SCHEDULE:
             # handle schedule change
             if "schedule_id" in data:
+                selected_schedule = self.hass.data[DOMAIN][DATA_SCHEDULES][
+                    self.home.entity_id
+                ].get(data["schedule_id"])
                 self._selected_schedule = getattr(
-                    self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
-                        data["schedule_id"]
-                    ),
+                    selected_schedule,
                     "name",
                     None,
                 )
                 self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
                     self._selected_schedule
                 )
+
+                self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE_ID] = getattr(
+                    selected_schedule, "entity_id", None
+                )
+
             self.async_write_ha_state()
             self.data_handler.async_force_update(self._signal_name)
         # ignore other schedule events
@@ -420,12 +427,14 @@
         self._attr_hvac_mode = HVAC_MAP_NETATMO[self._attr_preset_mode]
         self._away = self._attr_hvac_mode == HVAC_MAP_NETATMO[STATE_NETATMO_AWAY]
 
-        self._selected_schedule = getattr(
-            self.home.get_selected_schedule(), "name", None
-        )
+        selected_schedule = self.home.get_selected_schedule()
+        self._selected_schedule = getattr(selected_schedule, "name", None)
         self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
             self._selected_schedule
         )
+        self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE_ID] = getattr(
+            selected_schedule, "entity_id", None
+        )
 
         if self.device_type == NA_VALVE:
             self._attr_extra_state_attributes[ATTR_HEATING_POWER_REQUEST] = (
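A brief sketch (not part of the commit) of the resulting extra state attributes. The schedule object is a hypothetical stand-in modelling only the two attributes the hunks above read.

class _Schedule:
    """Hypothetical stand-in for a pyatmo schedule object."""
    name = "Week-end"
    entity_id = "schedule-12ab34cd"

selected = _Schedule()
attrs = {
    "selected_schedule": getattr(selected, "name", None),
    "selected_schedule_id": getattr(selected, "entity_id", None),
}
assert attrs == {"selected_schedule": "Week-end", "selected_schedule_id": "schedule-12ab34cd"}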
@@ -95,6 +95,7 @@ ATTR_PSEUDO = "pseudo"
 ATTR_SCHEDULE_ID = "schedule_id"
 ATTR_SCHEDULE_NAME = "schedule_name"
 ATTR_SELECTED_SCHEDULE = "selected_schedule"
+ATTR_SELECTED_SCHEDULE_ID = "selected_schedule_id"
 ATTR_TARGET_TEMPERATURE = "target_temperature"
 ATTR_TIME_PERIOD = "time_period"
 
@@ -13,7 +13,7 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import PERCENTAGE, UnitOfTemperature
+from homeassistant.const import PERCENTAGE, UnitOfInformation, UnitOfTemperature
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -84,6 +84,8 @@ async def async_setup_entry(
         OctoPrintJobPercentageSensor(coordinator, device_id),
         OctoPrintEstimatedFinishTimeSensor(coordinator, device_id),
         OctoPrintStartTimeSensor(coordinator, device_id),
+        OctoPrintFileNameSensor(coordinator, device_id),
+        OctoPrintFileSizeSensor(coordinator, device_id),
     ]
 
     async_add_entities(entities)
@@ -262,3 +264,61 @@ class OctoPrintTemperatureSensor(OctoPrintSensorBase):
     def available(self) -> bool:
         """Return if entity is available."""
         return self.coordinator.last_update_success and self.coordinator.data["printer"]
+
+
+class OctoPrintFileNameSensor(OctoPrintSensorBase):
+    """Representation of an OctoPrint sensor."""
+
+    def __init__(
+        self,
+        coordinator: OctoprintDataUpdateCoordinator,
+        device_id: str,
+    ) -> None:
+        """Initialize a new OctoPrint sensor."""
+        super().__init__(coordinator, "Current File", device_id)
+
+    @property
+    def native_value(self) -> str | None:
+        """Return sensor state."""
+        job: OctoprintJobInfo = self.coordinator.data["job"]
+
+        return job.job.file.name or None
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        if not self.coordinator.last_update_success:
+            return False
+        job: OctoprintJobInfo = self.coordinator.data["job"]
+        return job and job.job.file.name
+
+
+class OctoPrintFileSizeSensor(OctoPrintSensorBase):
+    """Representation of an OctoPrint sensor."""
+
+    _attr_device_class = SensorDeviceClass.DATA_SIZE
+    _attr_native_unit_of_measurement = UnitOfInformation.BYTES
+    _attr_suggested_unit_of_measurement = UnitOfInformation.MEGABYTES
+
+    def __init__(
+        self,
+        coordinator: OctoprintDataUpdateCoordinator,
+        device_id: str,
+    ) -> None:
+        """Initialize a new OctoPrint sensor."""
+        super().__init__(coordinator, "Current File Size", device_id)
+
+    @property
+    def native_value(self) -> int | None:
+        """Return sensor state."""
+        job: OctoprintJobInfo = self.coordinator.data["job"]
+
+        return job.job.file.size or None
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        if not self.coordinator.last_update_success:
+            return False
+        job: OctoprintJobInfo = self.coordinator.data["job"]
+        return job and job.job.file.size
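A small sketch (not part of the commit) of the unit handling the new file-size sensor relies on: the native value stays in bytes and the suggested unit only changes the display, assuming Home Assistant's decimal megabyte (1 MB = 1 000 000 bytes). The 25 MB figure is made-up sample data.

def bytes_to_megabytes(size_in_bytes: int) -> float:
    # Rough equivalent of what the frontend shows when MEGABYTES is suggested.
    return size_in_bytes / 1_000_000

assert bytes_to_megabytes(25_000_000) == 25.0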
@@ -29,6 +29,7 @@ PLATFORMS: list[Platform] = [
     Platform.BINARY_SENSOR,
     Platform.BUTTON,
     Platform.CLIMATE,
+    Platform.FAN,
     Platform.LOCK,
     Platform.SENSOR,
     Platform.SWITCH,
@@ -51,6 +52,7 @@ class SwitchbotDevices:
     sensors: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
     vacuums: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
     locks: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
+    fans: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
 
 
 @dataclass
@@ -96,7 +98,6 @@ async def make_switchbot_devices(
             for device in devices
         ]
     )
-
     return devices_data
 
 
@@ -177,6 +178,16 @@ async def make_device_data(
         else:
             devices_data.switches.append((device, coordinator))
 
+    if isinstance(device, Device) and device.device_type in [
+        "Battery Circulator Fan",
+        "Circulator Fan",
+    ]:
+        coordinator = await coordinator_for_device(
+            hass, entry, api, device, coordinators_by_id
+        )
+        devices_data.fans.append((device, coordinator))
+        devices_data.sensors.append((device, coordinator))
+
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up SwitchBot via API from a config entry."""
homeassistant/components/switchbot_cloud/fan.py (new file, +120 lines)
@@ -0,0 +1,120 @@
+"""Support for the Switchbot Battery Circulator fan."""
+
+import asyncio
+from typing import Any
+
+from switchbot_api import (
+    BatteryCirculatorFanCommands,
+    BatteryCirculatorFanMode,
+    CommonCommands,
+)
+
+from homeassistant.components.fan import FanEntity, FanEntityFeature
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import SwitchbotCloudData
+from .const import DOMAIN
+from .entity import SwitchBotCloudEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up SwitchBot Cloud entry."""
+    data: SwitchbotCloudData = hass.data[DOMAIN][config.entry_id]
+    async_add_entities(
+        SwitchBotCloudFan(data.api, device, coordinator)
+        for device, coordinator in data.devices.fans
+    )
+
+
+class SwitchBotCloudFan(SwitchBotCloudEntity, FanEntity):
+    """Representation of a SwitchBot Battery Circulator Fan."""
+
+    _attr_name = None
+
+    _attr_supported_features = (
+        FanEntityFeature.SET_SPEED
+        | FanEntityFeature.PRESET_MODE
+        | FanEntityFeature.TURN_OFF
+        | FanEntityFeature.TURN_ON
+    )
+    _attr_preset_modes = list(BatteryCirculatorFanMode)
+
+    _attr_is_on: bool | None = None
+
+    @property
+    def is_on(self) -> bool | None:
+        """Return true if the entity is on."""
+        return self._attr_is_on
+
+    def _set_attributes(self) -> None:
+        """Set attributes from coordinator data."""
+        if self.coordinator.data is None:
+            return
+
+        power: str = self.coordinator.data["power"]
+        mode: str = self.coordinator.data["mode"]
+        fan_speed: str = self.coordinator.data["fanSpeed"]
+        self._attr_is_on = power == "on"
+        self._attr_preset_mode = mode
+        self._attr_percentage = int(fan_speed)
+        self._attr_supported_features = (
+            FanEntityFeature.PRESET_MODE
+            | FanEntityFeature.TURN_OFF
+            | FanEntityFeature.TURN_ON
+        )
+        if self.is_on and self.preset_mode == BatteryCirculatorFanMode.DIRECT.value:
+            self._attr_supported_features |= FanEntityFeature.SET_SPEED
+
+    async def async_turn_on(
+        self,
+        percentage: int | None = None,
+        preset_mode: str | None = None,
+        **kwargs: Any,
+    ) -> None:
+        """Turn on the fan."""
+        await self.send_api_command(CommonCommands.ON)
+        await self.send_api_command(
+            command=BatteryCirculatorFanCommands.SET_WIND_MODE,
+            parameters=str(self.preset_mode),
+        )
+        if self.preset_mode == BatteryCirculatorFanMode.DIRECT.value:
+            await self.send_api_command(
+                command=BatteryCirculatorFanCommands.SET_WIND_SPEED,
+                parameters=str(self.percentage),
+            )
+        await asyncio.sleep(5)
+        await self.coordinator.async_request_refresh()
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn off the fan."""
+        await self.send_api_command(CommonCommands.OFF)
+        await asyncio.sleep(5)
+        await self.coordinator.async_request_refresh()
+
+    async def async_set_percentage(self, percentage: int) -> None:
+        """Set the speed of the fan, as a percentage."""
+        await self.send_api_command(
+            command=BatteryCirculatorFanCommands.SET_WIND_MODE,
+            parameters=str(BatteryCirculatorFanMode.DIRECT.value),
+        )
+        await self.send_api_command(
+            command=BatteryCirculatorFanCommands.SET_WIND_SPEED,
+            parameters=str(percentage),
+        )
+        await asyncio.sleep(5)
+        await self.coordinator.async_request_refresh()
+
+    async def async_set_preset_mode(self, preset_mode: str) -> None:
+        """Set new preset mode."""
+        await self.send_api_command(
+            command=BatteryCirculatorFanCommands.SET_WIND_MODE,
+            parameters=preset_mode,
+        )
+        await asyncio.sleep(5)
+        await self.coordinator.async_request_refresh()
@@ -91,6 +91,7 @@ CO2_DESCRIPTION = SensorEntityDescription(
 
 SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
     "Bot": (BATTERY_DESCRIPTION,),
+    "Battery Circulator Fan": (BATTERY_DESCRIPTION,),
     "Meter": (
         TEMPERATURE_DESCRIPTION,
         HUMIDITY_DESCRIPTION,
@@ -32,8 +32,6 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, selector, template
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -42,15 +40,11 @@ from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.script import Script
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from .const import CONF_OBJECT_ID, DOMAIN
+from .const import DOMAIN
 from .coordinator import TriggerUpdateCoordinator
 from .entity import AbstractTemplateEntity
-from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
-    TemplateEntity,
-    make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
-)
+from .helpers import async_setup_template_platform
+from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
 from .trigger_entity import TriggerEntity
 
 _LOGGER = logging.getLogger(__name__)
@@ -88,7 +82,7 @@ class TemplateCodeFormat(Enum):
     text = CodeFormat.TEXT
 
 
-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_VALUE_TEMPLATE: CONF_STATE,
 }
 
@@ -161,54 +155,6 @@ ALARM_CONTROL_PANEL_CONFIG_SCHEMA = vol.Schema(
 )
 
 
-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy alarm control panel configuration definitions to modern ones."""
-    alarm_control_panels = []
-
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        alarm_control_panels.append(entity_conf)
-
-    return alarm_control_panels
-
-
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template alarm control panels."""
-    alarm_control_panels = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        alarm_control_panels.append(
-            AlarmControlPanelTemplate(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(alarm_control_panels)
-
-
 def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]:
     """Rewrite option configuration to modern configuration."""
     option_config = {**option_config}
@@ -231,7 +177,7 @@ async def async_setup_entry(
     validated_config = ALARM_CONTROL_PANEL_CONFIG_SCHEMA(_options)
     async_add_entities(
         [
-            AlarmControlPanelTemplate(
+            StateAlarmControlPanelEntity(
                 hass,
                 validated_config,
                 config_entry.entry_id,
@@ -247,27 +193,16 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the Template cover."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_ALARM_CONTROL_PANELS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerAlarmControlPanelEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
-        async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
-    )
+    await async_setup_template_platform(
+        hass,
+        ALARM_CONTROL_PANEL_DOMAIN,
+        config,
+        StateAlarmControlPanelEntity,
+        TriggerAlarmControlPanelEntity,
+        async_add_entities,
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_ALARM_CONTROL_PANELS,
+    )
 
 
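For orientation only (not part of the commit): the platform now hands everything the removed per-platform boilerplate needed to a shared helper in the template integration's `helpers` module. The sketch below is a rough guess at the dispatch it centralizes, with the signature inferred solely from the call site above; the real implementation may differ.

async def async_setup_template_platform_sketch(
    hass, domain, config, state_entity_cls, trigger_entity_cls,
    async_add_entities, discovery_info, legacy_fields, *, legacy_key=None,
):
    if discovery_info is None:
        # YAML platform setup: rewrite legacy per-object_id config and
        # create state-based entities (details omitted).
        ...
    elif "coordinator" in discovery_info:
        # Trigger-based template entities share a TriggerUpdateCoordinator.
        async_add_entities(
            trigger_entity_cls(hass, discovery_info["coordinator"], cfg)
            for cfg in discovery_info["entities"]
        )
    else:
        # Discovered modern config: create state-based entities with a
        # unique_id prefix (details omitted).
        ...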
@@ -276,6 +211,8 @@ class AbstractTemplateAlarmControlPanel(
 ):
     """Representation of a templated Alarm Control Panel features."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
+
     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -414,7 +351,7 @@
     )
 
 
-class AlarmControlPanelTemplate(TemplateEntity, AbstractTemplateAlarmControlPanel):
+class StateAlarmControlPanelEntity(TemplateEntity, AbstractTemplateAlarmControlPanel):
     """Representation of a templated Alarm Control Panel."""
 
     _attr_should_poll = False
@@ -426,12 +363,8 @@
         unique_id: str | None,
     ) -> None:
         """Initialize the panel."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateAlarmControlPanel.__init__(self, config)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
@@ -442,11 +375,6 @@
             self.add_script(action_id, action_config, name, DOMAIN)
             self._attr_supported_features |= supported_feature
 
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
-
     async def async_added_to_hass(self) -> None:
         """Restore last state."""
         await super().async_added_to_hass()
@@ -497,11 +425,6 @@ class TriggerAlarmControlPanelEntity(TriggerEntity, AbstractTemplateAlarmControl
             self.add_script(action_id, action_config, name, DOMAIN)
             self._attr_supported_features |= supported_feature
 
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
-
     async def async_added_to_hass(self) -> None:
         """Restore last state."""
         await super().async_added_to_hass()
@@ -24,9 +24,7 @@ from homeassistant.const import (
     CONF_DEVICE_CLASS,
     CONF_DEVICE_ID,
     CONF_ENTITY_PICTURE_TEMPLATE,
-    CONF_FRIENDLY_NAME,
     CONF_FRIENDLY_NAME_TEMPLATE,
-    CONF_ICON,
     CONF_ICON_TEMPLATE,
     CONF_NAME,
     CONF_SENSORS,
@@ -41,8 +39,6 @@ from homeassistant.const import (
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, selector, template
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -53,18 +49,9 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util
 
 from . import TriggerUpdateCoordinator
-from .const import (
-    CONF_ATTRIBUTES,
-    CONF_AVAILABILITY,
-    CONF_AVAILABILITY_TEMPLATE,
-    CONF_OBJECT_ID,
-    CONF_PICTURE,
-)
-from .template_entity import (
-    TEMPLATE_ENTITY_COMMON_SCHEMA,
-    TemplateEntity,
-    rewrite_common_legacy_to_modern_conf,
-)
+from .const import CONF_AVAILABILITY_TEMPLATE
+from .helpers import async_setup_template_platform
+from .template_entity import TEMPLATE_ENTITY_COMMON_SCHEMA, TemplateEntity
 from .trigger_entity import TriggerEntity
 
 CONF_DELAY_ON = "delay_on"
@@ -73,12 +60,7 @@ CONF_AUTO_OFF = "auto_off"
 CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
 
 LEGACY_FIELDS = {
-    CONF_ICON_TEMPLATE: CONF_ICON,
-    CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
-    CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
-    CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
     CONF_FRIENDLY_NAME_TEMPLATE: CONF_NAME,
-    CONF_FRIENDLY_NAME: CONF_NAME,
     CONF_VALUE_TEMPLATE: CONF_STATE,
 }
 
@@ -121,27 +103,6 @@ LEGACY_BINARY_SENSOR_SCHEMA = vol.All(
 )
 
 
-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, cfg: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy binary sensor definitions to modern ones."""
-    sensors = []
-
-    for object_id, entity_cfg in cfg.items():
-        entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id}
-
-        entity_cfg = rewrite_common_legacy_to_modern_conf(
-            hass, entity_cfg, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_cfg:
-            entity_cfg[CONF_NAME] = template.Template(object_id, hass)
-
-        sensors.append(entity_cfg)
-
-    return sensors
-
-
 PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
     {
         vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(
@@ -151,33 +112,6 @@ PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
 )
 
 
-@callback
-def _async_create_template_tracking_entities(
|
|
||||||
async_add_entities: AddEntitiesCallback | AddConfigEntryEntitiesCallback,
|
|
||||||
hass: HomeAssistant,
|
|
||||||
definitions: list[dict],
|
|
||||||
unique_id_prefix: str | None,
|
|
||||||
) -> None:
|
|
||||||
"""Create the template binary sensors."""
|
|
||||||
sensors = []
|
|
||||||
|
|
||||||
for entity_conf in definitions:
|
|
||||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
|
||||||
|
|
||||||
if unique_id and unique_id_prefix:
|
|
||||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
|
||||||
|
|
||||||
sensors.append(
|
|
||||||
BinarySensorTemplate(
|
|
||||||
hass,
|
|
||||||
entity_conf,
|
|
||||||
unique_id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
async_add_entities(sensors)
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_platform(
|
async def async_setup_platform(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
config: ConfigType,
|
config: ConfigType,
|
||||||
@ -185,27 +119,16 @@ async def async_setup_platform(
|
|||||||
discovery_info: DiscoveryInfoType | None = None,
|
discovery_info: DiscoveryInfoType | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Set up the template binary sensors."""
|
"""Set up the template binary sensors."""
|
||||||
if discovery_info is None:
|
await async_setup_template_platform(
|
||||||
_async_create_template_tracking_entities(
|
|
||||||
async_add_entities,
|
|
||||||
hass,
|
hass,
|
||||||
rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]),
|
BINARY_SENSOR_DOMAIN,
|
||||||
None,
|
config,
|
||||||
)
|
StateBinarySensorEntity,
|
||||||
return
|
TriggerBinarySensorEntity,
|
||||||
|
|
||||||
if "coordinator" in discovery_info:
|
|
||||||
async_add_entities(
|
|
||||||
TriggerBinarySensorEntity(hass, discovery_info["coordinator"], config)
|
|
||||||
for config in discovery_info["entities"]
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
_async_create_template_tracking_entities(
|
|
||||||
async_add_entities,
|
async_add_entities,
|
||||||
hass,
|
discovery_info,
|
||||||
discovery_info["entities"],
|
LEGACY_FIELDS,
|
||||||
discovery_info["unique_id"],
|
legacy_key=CONF_SENSORS,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -219,23 +142,24 @@ async def async_setup_entry(
|
|||||||
_options.pop("template_type")
|
_options.pop("template_type")
|
||||||
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(_options)
|
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(_options)
|
||||||
async_add_entities(
|
async_add_entities(
|
||||||
[BinarySensorTemplate(hass, validated_config, config_entry.entry_id)]
|
[StateBinarySensorEntity(hass, validated_config, config_entry.entry_id)]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_create_preview_binary_sensor(
|
def async_create_preview_binary_sensor(
|
||||||
hass: HomeAssistant, name: str, config: dict[str, Any]
|
hass: HomeAssistant, name: str, config: dict[str, Any]
|
||||||
) -> BinarySensorTemplate:
|
) -> StateBinarySensorEntity:
|
||||||
"""Create a preview sensor."""
|
"""Create a preview sensor."""
|
||||||
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(config | {CONF_NAME: name})
|
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(config | {CONF_NAME: name})
|
||||||
return BinarySensorTemplate(hass, validated_config, None)
|
return StateBinarySensorEntity(hass, validated_config, None)
|
||||||
|
|
||||||
|
|
||||||
class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||||
"""A virtual binary sensor that triggers from another sensor."""
|
"""A virtual binary sensor that triggers from another sensor."""
|
||||||
|
|
||||||
_attr_should_poll = False
|
_attr_should_poll = False
|
||||||
|
_entity_id_format = ENTITY_ID_FORMAT
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@ -244,11 +168,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
|||||||
unique_id: str | None,
|
unique_id: str | None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize the Template binary sensor."""
|
"""Initialize the Template binary sensor."""
|
||||||
super().__init__(hass, config=config, unique_id=unique_id)
|
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
|
||||||
self.entity_id = async_generate_entity_id(
|
|
||||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
|
||||||
)
|
|
||||||
|
|
||||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||||
self._template = config[CONF_STATE]
|
self._template = config[CONF_STATE]
|
||||||
@ -257,10 +177,6 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
|||||||
self._delay_on_raw = config.get(CONF_DELAY_ON)
|
self._delay_on_raw = config.get(CONF_DELAY_ON)
|
||||||
self._delay_off = None
|
self._delay_off = None
|
||||||
self._delay_off_raw = config.get(CONF_DELAY_OFF)
|
self._delay_off_raw = config.get(CONF_DELAY_OFF)
|
||||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
|
||||||
hass,
|
|
||||||
config.get(CONF_DEVICE_ID),
|
|
||||||
)
|
|
||||||
|
|
||||||
async def async_added_to_hass(self) -> None:
|
async def async_added_to_hass(self) -> None:
|
||||||
"""Restore state."""
|
"""Restore state."""
|
||||||
@ -333,6 +249,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
|||||||
class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity):
|
class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity):
|
||||||
"""Sensor entity based on trigger data."""
|
"""Sensor entity based on trigger data."""
|
||||||
|
|
||||||
|
_entity_id_format = ENTITY_ID_FORMAT
|
||||||
domain = BINARY_SENSOR_DOMAIN
|
domain = BINARY_SENSOR_DOMAIN
|
||||||
extra_template_keys = (CONF_STATE,)
|
extra_template_keys = (CONF_STATE,)
|
||||||
|
|
||||||
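The net effect of the binary_sensor changes is that the platform no longer owns its legacy-to-modern rewrite: it only declares its platform-specific LEGACY_FIELDS and hands the old `binary_sensor:` platform config to the shared helper (added in the helpers.py portion of this diff) via `legacy_key=CONF_SENSORS`. A hedged sketch of what that rewrite produces for one legacy entry; the entity name and templates below are invented for illustration and are not part of this diff:

# Illustrative only. Shows the shape of the dict that
# rewrite_legacy_to_modern_configs() (see helpers.py later in this diff) returns
# for a single legacy binary_sensor entry, given the LEGACY_FIELDS tables in this
# file and in helpers.py.
legacy_sensors = {
    "garage_door": {
        "friendly_name": "Garage Door",
        "value_template": "{{ is_state('cover.garage', 'open') }}",
        "device_class": "door",
    }
}
# rewrite_legacy_to_modern_configs(hass, legacy_sensors, LEGACY_FIELDS) yields roughly:
# [
#     {
#         "object_id": "garage_door",                      # injected CONF_OBJECT_ID
#         "name": Template("Garage Door", hass),           # friendly_name -> name
#         "state": Template("{{ is_state('cover.garage', 'open') }}", hass),
#         "device_class": "door",                          # unmapped keys pass through
#     }
# ]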
homeassistant/components/template/button.py
@@ -3,22 +3,20 @@
 from __future__ import annotations

 import logging
-from typing import Any
+from typing import TYPE_CHECKING

 import voluptuous as vol

-from homeassistant.components.button import DEVICE_CLASSES_SCHEMA, ButtonEntity
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    CONF_DEVICE_CLASS,
-    CONF_DEVICE_ID,
-    CONF_NAME,
-    CONF_UNIQUE_ID,
+from homeassistant.components.button import (
+    DEVICE_CLASSES_SCHEMA,
+    DOMAIN as BUTTON_DOMAIN,
+    ENTITY_ID_FORMAT,
+    ButtonEntity,
 )
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICE_ID, CONF_NAME
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import PlatformNotReady
 from homeassistant.helpers import config_validation as cv, selector
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -26,6 +24,7 @@ from homeassistant.helpers.entity_platform import (
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from .const import CONF_PRESS, DOMAIN
+from .helpers import async_setup_template_platform
 from .template_entity import TemplateEntity, make_template_entity_common_modern_schema

 _LOGGER = logging.getLogger(__name__)
@@ -50,19 +49,6 @@ CONFIG_BUTTON_SCHEMA = vol.Schema(
 )


-async def _async_create_entities(
-    hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
-) -> list[TemplateButtonEntity]:
-    """Create the Template button."""
-    entities = []
-    for definition in definitions:
-        unique_id = definition.get(CONF_UNIQUE_ID)
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-        entities.append(TemplateButtonEntity(hass, definition, unique_id))
-    return entities
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -70,15 +56,14 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template button."""
-    if not discovery_info or "coordinator" in discovery_info:
-        raise PlatformNotReady(
-            "The template button platform doesn't support trigger entities"
-        )
-
-    async_add_entities(
-        await _async_create_entities(
-            hass, discovery_info["entities"], discovery_info["unique_id"]
-        )
+    await async_setup_template_platform(
+        hass,
+        BUTTON_DOMAIN,
+        config,
+        StateButtonEntity,
+        None,
+        async_add_entities,
+        discovery_info,
     )


@@ -92,14 +77,15 @@ async def async_setup_entry(
     _options.pop("template_type")
     validated_config = CONFIG_BUTTON_SCHEMA(_options)
     async_add_entities(
-        [TemplateButtonEntity(hass, validated_config, config_entry.entry_id)]
+        [StateButtonEntity(hass, validated_config, config_entry.entry_id)]
     )


-class TemplateButtonEntity(TemplateEntity, ButtonEntity):
+class StateButtonEntity(TemplateEntity, ButtonEntity):
     """Representation of a template button."""

     _attr_should_poll = False
+    _entity_id_format = ENTITY_ID_FORMAT

     def __init__(
         self,
@@ -108,17 +94,16 @@ class TemplateButtonEntity(TemplateEntity, ButtonEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the button."""
-        super().__init__(hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)

-        assert self._attr_name is not None
+        if TYPE_CHECKING:
+            assert self._attr_name is not None

         # Scripts can be an empty list, therefore we need to check for None
         if (action := config.get(CONF_PRESS)) is not None:
             self.add_script(CONF_PRESS, action, self._attr_name, DOMAIN)
         self._attr_device_class = config.get(CONF_DEVICE_CLASS)
         self._attr_state = None
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )

     async def async_press(self) -> None:
         """Press the button."""
homeassistant/components/template/__init__.py
@@ -65,7 +65,7 @@ from . import (
     weather as weather_platform,
 )
 from .const import DOMAIN, PLATFORMS, TemplateConfig
-from .helpers import async_get_blueprints
+from .helpers import async_get_blueprints, rewrite_legacy_to_modern_configs

 PACKAGE_MERGE_HINT = "list"

@@ -249,16 +249,16 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf

     legacy_warn_printed = False

-    for old_key, new_key, transform in (
+    for old_key, new_key, legacy_fields in (
         (
             CONF_SENSORS,
             DOMAIN_SENSOR,
-            sensor_platform.rewrite_legacy_to_modern_conf,
+            sensor_platform.LEGACY_FIELDS,
         ),
         (
             CONF_BINARY_SENSORS,
             DOMAIN_BINARY_SENSOR,
-            binary_sensor_platform.rewrite_legacy_to_modern_conf,
+            binary_sensor_platform.LEGACY_FIELDS,
         ),
     ):
         if old_key not in template_config:
@@ -276,7 +276,11 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf
         definitions = (
             list(template_config[new_key]) if new_key in template_config else []
         )
-        definitions.extend(transform(hass, template_config[old_key]))
+        definitions.extend(
+            rewrite_legacy_to_modern_configs(
+                hass, template_config[old_key], legacy_fields
+            )
+        )
         template_config = TemplateConfig({**template_config, new_key: definitions})

         config_sections.append(template_config)
homeassistant/components/template/cover.py
@@ -32,19 +32,17 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, template
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from . import TriggerUpdateCoordinator
-from .const import CONF_OBJECT_ID, DOMAIN
+from .const import DOMAIN
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity

@@ -85,7 +83,7 @@ TILT_FEATURES = (
     | CoverEntityFeature.SET_TILT_POSITION
 )

-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_VALUE_TEMPLATE: CONF_STATE,
     CONF_POSITION_TEMPLATE: CONF_POSITION,
     CONF_TILT_TEMPLATE: CONF_TILT,
@@ -140,54 +138,6 @@ PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
 )


-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy switch configuration definitions to modern ones."""
-    covers = []
-
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        covers.append(entity_conf)
-
-    return covers
-
-
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template switches."""
-    covers = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        covers.append(
-            CoverTemplate(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(covers)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -195,33 +145,24 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the Template cover."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_COVERS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerCoverEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
-        async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+    await async_setup_template_platform(
+        hass,
+        COVER_DOMAIN,
+        config,
+        StateCoverEntity,
+        TriggerCoverEntity,
+        async_add_entities,
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_COVERS,
     )


 class AbstractTemplateCover(AbstractTemplateEntity, CoverEntity):
     """Representation of a template cover features."""

+    _entity_id_format = ENTITY_ID_FORMAT

     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -445,7 +386,7 @@ class AbstractTemplateCover(AbstractTemplateEntity, CoverEntity):
         self.async_write_ha_state()


-class CoverTemplate(TemplateEntity, AbstractTemplateCover):
+class StateCoverEntity(TemplateEntity, AbstractTemplateCover):
     """Representation of a Template cover."""

     _attr_should_poll = False
@@ -457,12 +398,8 @@ class CoverTemplate(TemplateEntity, AbstractTemplateCover):
         unique_id,
     ) -> None:
         """Initialize the Template cover."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateCover.__init__(self, config)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
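The `# pylint: disable=super-init-not-called` comments above encode a deliberate initialization contract: the abstract mixin never calls `super().__init__`, and the concrete state class calls both bases explicitly, so `AbstractTemplateEntity.__init__` (reworked in the entity.py hunk below) runs exactly once, via `TemplateEntity.__init__`. A hedged sketch of that call chain, with the bodies trimmed to the relevant calls:

# Sketch only -- the class name is invented and the bodies are reduced to the
# init calls, to show the order in which the bases are initialized.
class StateCoverEntitySketch(TemplateEntity, AbstractTemplateCover):
    def __init__(self, hass, config, unique_id):
        # 1. TemplateEntity.__init__ runs first and, per the comments above, is
        #    the call that reaches AbstractTemplateEntity.__init__(hass, config)
        #    (entity_id from CONF_OBJECT_ID, device_info from CONF_DEVICE_ID).
        TemplateEntity.__init__(self, hass, config, unique_id)
        # 2. AbstractTemplateCover.__init__ only sets up cover-specific state and
        #    deliberately skips super().__init__, so the base init is not run twice.
        AbstractTemplateCover.__init__(self, config)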
homeassistant/components/template/entity.py
@@ -1,32 +1,50 @@
 """Template entity base class."""

+from abc import abstractmethod
 from collections.abc import Sequence
 from typing import Any

+from homeassistant.const import CONF_DEVICE_ID
 from homeassistant.core import Context, HomeAssistant, callback
-from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.device import async_device_info_to_link_from_device_id
+from homeassistant.helpers.entity import Entity, async_generate_entity_id
 from homeassistant.helpers.script import Script, _VarsType
 from homeassistant.helpers.template import TemplateStateFromEntityId
+from homeassistant.helpers.typing import ConfigType
+
+from .const import CONF_OBJECT_ID


 class AbstractTemplateEntity(Entity):
     """Actions linked to a template entity."""

-    def __init__(self, hass: HomeAssistant) -> None:
+    _entity_id_format: str
+
+    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize the entity."""

         self.hass = hass
         self._action_scripts: dict[str, Script] = {}

+        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
+            self.entity_id = async_generate_entity_id(
+                self._entity_id_format, object_id, hass=self.hass
+            )
+
+        self._attr_device_info = async_device_info_to_link_from_device_id(
+            self.hass,
+            config.get(CONF_DEVICE_ID),
+        )
+
     @property
+    @abstractmethod
     def referenced_blueprint(self) -> str | None:
         """Return referenced blueprint or None."""
-        raise NotImplementedError

     @callback
+    @abstractmethod
     def _render_script_variables(self) -> dict:
         """Render configured variables."""
-        raise NotImplementedError

     def add_script(
         self,
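With this change, `AbstractTemplateEntity` takes the validated config and owns entity_id and device-link handling, keyed off the `_entity_id_format` class attribute that every platform sets in the surrounding hunks. A minimal, hypothetical subclass showing the new contract (none of these names appear in the diff; this is a sketch, not the integration's code):

# Hedged sketch, assuming the entity.py shown above. A concrete template entity
# declares its domain's entity_id format and forwards hass/config; object_id and
# device_id resolution now happen in AbstractTemplateEntity.__init__.
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.typing import ConfigType

from .entity import AbstractTemplateEntity


class ExampleTemplateEntity(AbstractTemplateEntity):
    """Hypothetical subclass used only to illustrate the new base-class contract."""

    _entity_id_format = "example.{}"  # a real platform uses its domain's ENTITY_ID_FORMAT

    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        super().__init__(hass, config)  # sets entity_id and _attr_device_info

    @property
    def referenced_blueprint(self) -> str | None:
        """No blueprint in this sketch."""
        return None

    @callback
    def _render_script_variables(self) -> dict:
        """No extra script variables in this sketch."""
        return {}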
homeassistant/components/template/fan.py
@@ -34,19 +34,17 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, template
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

-from .const import CONF_OBJECT_ID, DOMAIN
+from .const import DOMAIN
 from .coordinator import TriggerUpdateCoordinator
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity

@@ -73,7 +71,7 @@ CONF_OSCILLATING = "oscillating"
 CONF_PERCENTAGE = "percentage"
 CONF_PRESET_MODE = "preset_mode"

-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_DIRECTION_TEMPLATE: CONF_DIRECTION,
     CONF_OSCILLATING_TEMPLATE: CONF_OSCILLATING,
     CONF_PERCENTAGE_TEMPLATE: CONF_PERCENTAGE,
@@ -132,54 +130,6 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
 )


-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy fan configuration definitions to modern ones."""
-    fans = []
-
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        fans.append(entity_conf)
-
-    return fans
-
-
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template fans."""
-    fans = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        fans.append(
-            TemplateFan(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(fans)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -187,33 +137,24 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template fans."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_FANS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerFanEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
-        async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+    await async_setup_template_platform(
+        hass,
+        FAN_DOMAIN,
+        config,
+        StateFanEntity,
+        TriggerFanEntity,
+        async_add_entities,
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_FANS,
     )


 class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
     """Representation of a template fan features."""

+    _entity_id_format = ENTITY_ID_FORMAT

     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -484,7 +425,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
         )


-class TemplateFan(TemplateEntity, AbstractTemplateFan):
+class StateFanEntity(TemplateEntity, AbstractTemplateFan):
     """A template fan component."""

     _attr_should_poll = False
@@ -496,12 +437,8 @@ class TemplateFan(TemplateEntity, AbstractTemplateFan):
         unique_id,
     ) -> None:
         """Initialize the fan."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateFan.__init__(self, config)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
homeassistant/components/template/helpers.py
@@ -1,19 +1,60 @@
 """Helpers for template integration."""

+from collections.abc import Callable
+import itertools
 import logging
+from typing import Any

 from homeassistant.components import blueprint
-from homeassistant.const import SERVICE_RELOAD
+from homeassistant.const import (
+    CONF_ENTITY_PICTURE_TEMPLATE,
+    CONF_FRIENDLY_NAME,
+    CONF_ICON,
+    CONF_ICON_TEMPLATE,
+    CONF_NAME,
+    CONF_UNIQUE_ID,
+    SERVICE_RELOAD,
+)
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity_platform import async_get_platforms
+from homeassistant.exceptions import PlatformNotReady
+from homeassistant.helpers import template
+from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.entity_platform import (
+    AddEntitiesCallback,
+    async_get_platforms,
+)
 from homeassistant.helpers.singleton import singleton
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

-from .const import DOMAIN
+from .const import (
+    CONF_ATTRIBUTE_TEMPLATES,
+    CONF_ATTRIBUTES,
+    CONF_AVAILABILITY,
+    CONF_AVAILABILITY_TEMPLATE,
+    CONF_OBJECT_ID,
+    CONF_PICTURE,
+    DOMAIN,
+)
 from .entity import AbstractTemplateEntity
+from .template_entity import TemplateEntity
+from .trigger_entity import TriggerEntity

 DATA_BLUEPRINTS = "template_blueprints"

-LOGGER = logging.getLogger(__name__)
+LEGACY_FIELDS = {
+    CONF_ICON_TEMPLATE: CONF_ICON,
+    CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
+    CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
+    CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
+    CONF_FRIENDLY_NAME: CONF_NAME,
+}
+
+_LOGGER = logging.getLogger(__name__)
+
+type CreateTemplateEntitiesCallback = Callable[
+    [type[TemplateEntity], AddEntitiesCallback, HomeAssistant, list[dict], str | None],
+    None,
+]


 @callback
@@ -59,8 +100,131 @@ def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
     return blueprint.DomainBlueprints(
         hass,
         DOMAIN,
-        LOGGER,
+        _LOGGER,
         _blueprint_in_use,
         _reload_blueprint_templates,
         TEMPLATE_BLUEPRINT_SCHEMA,
     )
+
+
+def rewrite_legacy_to_modern_config(
+    hass: HomeAssistant,
+    entity_cfg: dict[str, Any],
+    extra_legacy_fields: dict[str, str],
+) -> dict[str, Any]:
+    """Rewrite legacy config."""
+    entity_cfg = {**entity_cfg}
+
+    for from_key, to_key in itertools.chain(
+        LEGACY_FIELDS.items(), extra_legacy_fields.items()
+    ):
+        if from_key not in entity_cfg or to_key in entity_cfg:
+            continue
+
+        val = entity_cfg.pop(from_key)
+        if isinstance(val, str):
+            val = template.Template(val, hass)
+        entity_cfg[to_key] = val
+
+    if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str):
+        entity_cfg[CONF_NAME] = template.Template(entity_cfg[CONF_NAME], hass)
+
+    return entity_cfg
+
+
+def rewrite_legacy_to_modern_configs(
+    hass: HomeAssistant,
+    entity_cfg: dict[str, dict],
+    extra_legacy_fields: dict[str, str],
+) -> list[dict]:
+    """Rewrite legacy configuration definitions to modern ones."""
+    entities = []
+    for object_id, entity_conf in entity_cfg.items():
+        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
+
+        entity_conf = rewrite_legacy_to_modern_config(
+            hass, entity_conf, extra_legacy_fields
+        )
+
+        if CONF_NAME not in entity_conf:
+            entity_conf[CONF_NAME] = template.Template(object_id, hass)
+
+        entities.append(entity_conf)
+
+    return entities
+
+
+@callback
+def async_create_template_tracking_entities(
+    entity_cls: type[Entity],
+    async_add_entities: AddEntitiesCallback,
+    hass: HomeAssistant,
+    definitions: list[dict],
+    unique_id_prefix: str | None,
+) -> None:
+    """Create the template tracking entities."""
+    entities: list[Entity] = []
+    for definition in definitions:
+        unique_id = definition.get(CONF_UNIQUE_ID)
+        if unique_id and unique_id_prefix:
+            unique_id = f"{unique_id_prefix}-{unique_id}"
+        entities.append(entity_cls(hass, definition, unique_id))  # type: ignore[call-arg]
+    async_add_entities(entities)
+
+
+async def async_setup_template_platform(
+    hass: HomeAssistant,
+    domain: str,
+    config: ConfigType,
+    state_entity_cls: type[TemplateEntity],
+    trigger_entity_cls: type[TriggerEntity] | None,
+    async_add_entities: AddEntitiesCallback,
+    discovery_info: DiscoveryInfoType | None,
+    legacy_fields: dict[str, str] | None = None,
+    legacy_key: str | None = None,
+) -> None:
+    """Set up the Template platform."""
+    if discovery_info is None:
+        # Legacy Configuration
+        if legacy_fields is not None:
+            if legacy_key:
+                configs = rewrite_legacy_to_modern_configs(
+                    hass, config[legacy_key], legacy_fields
+                )
+            else:
+                configs = [rewrite_legacy_to_modern_config(hass, config, legacy_fields)]
+            async_create_template_tracking_entities(
+                state_entity_cls,
+                async_add_entities,
+                hass,
+                configs,
+                None,
+            )
+        else:
+            _LOGGER.warning(
+                "Template %s entities can only be configured under template:", domain
+            )
+        return
+
+    # Trigger Configuration
+    if "coordinator" in discovery_info:
+        if trigger_entity_cls:
+            entities = [
+                trigger_entity_cls(hass, discovery_info["coordinator"], config)
+                for config in discovery_info["entities"]
+            ]
+            async_add_entities(entities)
+        else:
+            raise PlatformNotReady(
+                f"The template {domain} platform doesn't support trigger entities"
+            )
+        return
+
+    # Modern Configuration
+    async_create_template_tracking_entities(
+        state_entity_cls,
+        async_add_entities,
+        hass,
+        discovery_info["entities"],
+        discovery_info["unique_id"],
+    )
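Taken together, these helpers let each platform's `async_setup_platform` collapse to a single call that covers the legacy, trigger-based, and modern configuration paths. A hedged usage sketch for a hypothetical platform; the `demo` names are placeholders, and the real call sites are the platform hunks elsewhere in this diff:

# Sketch only -- StateDemoEntity, TriggerDemoEntity, LEGACY_FIELDS and the
# "demos" key are invented; the argument order matches async_setup_template_platform above.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up a hypothetical template demo platform."""
    await async_setup_template_platform(
        hass,
        "demo",              # the platform's domain string
        config,
        StateDemoEntity,     # TemplateEntity subclass used for state-based config
        TriggerDemoEntity,   # or None if trigger-based config is unsupported
        async_add_entities,
        discovery_info,
        LEGACY_FIELDS,       # platform-specific legacy-to-modern key mapping
        legacy_key="demos",  # old platform-YAML key, when one exists
    )

When the modern path runs, `async_create_template_tracking_entities` receives `discovery_info["unique_id"]` as the prefix and namespaces each entity's unique_id as "prefix-unique_id", matching the behavior of the per-platform helpers this diff removes.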
homeassistant/components/template/image.py
@@ -7,19 +7,16 @@ from typing import Any

 import voluptuous as vol

-from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN, ImageEntity
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    CONF_DEVICE_ID,
-    CONF_NAME,
-    CONF_UNIQUE_ID,
-    CONF_URL,
-    CONF_VERIFY_SSL,
+from homeassistant.components.image import (
+    DOMAIN as IMAGE_DOMAIN,
+    ENTITY_ID_FORMAT,
+    ImageEntity,
 )
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_DEVICE_ID, CONF_NAME, CONF_URL, CONF_VERIFY_SSL
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, selector
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -29,6 +26,7 @@ from homeassistant.util import dt as dt_util

 from . import TriggerUpdateCoordinator
 from .const import CONF_PICTURE
+from .helpers import async_setup_template_platform
 from .template_entity import (
     TemplateEntity,
     make_template_entity_common_modern_attributes_schema,
@@ -59,19 +57,6 @@ IMAGE_CONFIG_SCHEMA = vol.Schema(
 )


-async def _async_create_entities(
-    hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
-) -> list[StateImageEntity]:
-    """Create the template image."""
-    entities = []
-    for definition in definitions:
-        unique_id = definition.get(CONF_UNIQUE_ID)
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-        entities.append(StateImageEntity(hass, definition, unique_id))
-    return entities
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -79,23 +64,14 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template image."""
-    if discovery_info is None:
-        _LOGGER.warning(
-            "Template image entities can only be configured under template:"
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerImageEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    async_add_entities(
-        await _async_create_entities(
-            hass, discovery_info["entities"], discovery_info["unique_id"]
-        )
+    await async_setup_template_platform(
+        hass,
+        IMAGE_DOMAIN,
+        config,
+        StateImageEntity,
+        TriggerImageEntity,
+        async_add_entities,
+        discovery_info,
     )


@@ -118,6 +94,7 @@ class StateImageEntity(TemplateEntity, ImageEntity):

     _attr_should_poll = False
     _attr_image_url: str | None = None
+    _entity_id_format = ENTITY_ID_FORMAT

     def __init__(
         self,
@@ -126,13 +103,9 @@ class StateImageEntity(TemplateEntity, ImageEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the image."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         ImageEntity.__init__(self, hass, config[CONF_VERIFY_SSL])
         self._url_template = config[CONF_URL]
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )

     @property
     def entity_picture(self) -> str | None:
@@ -162,6 +135,7 @@ class TriggerImageEntity(TriggerEntity, ImageEntity):
     """Image entity based on trigger data."""

     _attr_image_url: str | None = None
+    _entity_id_format = ENTITY_ID_FORMAT

     domain = IMAGE_DOMAIN
     extra_template_keys = (CONF_URL,)
homeassistant/components/template/light.py
@@ -43,20 +43,18 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, template
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import color as color_util

 from . import TriggerUpdateCoordinator
-from .const import CONF_OBJECT_ID, DOMAIN
+from .const import DOMAIN
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity

@@ -103,7 +101,7 @@ CONF_WHITE_VALUE_TEMPLATE = "white_value_template"
 DEFAULT_MIN_MIREDS = 153
 DEFAULT_MAX_MIREDS = 500

-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_COLOR_ACTION: CONF_HS_ACTION,
     CONF_COLOR_TEMPLATE: CONF_HS,
     CONF_EFFECT_LIST_TEMPLATE: CONF_EFFECT_LIST,
@@ -193,47 +191,6 @@ PLATFORM_SCHEMA = vol.All(
 )


-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy switch configuration definitions to modern ones."""
-    lights = []
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        lights.append(entity_conf)
-
-    return lights
-
-
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the Template Lights."""
-    lights = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        lights.append(LightTemplate(hass, entity_conf, unique_id))
-
-    async_add_entities(lights)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -241,33 +198,24 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template lights."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_LIGHTS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerLightEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
-        async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+    await async_setup_template_platform(
+        hass,
+        LIGHT_DOMAIN,
+        config,
+        StateLightEntity,
+        TriggerLightEntity,
+        async_add_entities,
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_LIGHTS,
     )


 class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
     """Representation of a template lights features."""

+    _entity_id_format = ENTITY_ID_FORMAT

     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(  # pylint: disable=super-init-not-called
@@ -934,7 +882,7 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
         self._attr_supported_features |= LightEntityFeature.TRANSITION


-class LightTemplate(TemplateEntity, AbstractTemplateLight):
+class StateLightEntity(TemplateEntity, AbstractTemplateLight):
     """Representation of a templated Light, including dimmable."""

     _attr_should_poll = False
@@ -946,12 +894,8 @@ class LightTemplate(TemplateEntity, AbstractTemplateLight):
         unique_id: str | None,
     ) -> None:
         """Initialize the light."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateLight.__init__(self, config)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
|
@@ -9,6 +9,7 @@ import voluptuous as vol
 from homeassistant.components.lock import (
     DOMAIN as LOCK_DOMAIN,
+    ENTITY_ID_FORMAT,
     PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA,
     LockEntity,
     LockEntityFeature,
@@ -31,12 +32,11 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from .const import CONF_PICTURE, DOMAIN
 from .coordinator import TriggerUpdateCoordinator
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity
 
@@ -49,7 +49,7 @@ CONF_OPEN = "open"
 DEFAULT_NAME = "Template Lock"
 DEFAULT_OPTIMISTIC = False
 
-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_CODE_FORMAT_TEMPLATE: CONF_CODE_FORMAT,
     CONF_VALUE_TEMPLATE: CONF_STATE,
 }
@@ -83,33 +83,6 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend(
 ).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema)
 
 
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template fans."""
-    fans = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        fans.append(
-            TemplateLock(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(fans)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -117,33 +90,23 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template fans."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            [rewrite_common_legacy_to_modern_conf(hass, config, LEGACY_FIELDS)],
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerLockEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
+    await async_setup_template_platform(
+        hass,
+        LOCK_DOMAIN,
+        config,
+        StateLockEntity,
+        TriggerLockEntity,
         async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+        discovery_info,
+        LEGACY_FIELDS,
     )
 
 
 class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
     """Representation of a template lock features."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
+
     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -311,7 +274,7 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
         )
 
 
-class TemplateLock(TemplateEntity, AbstractTemplateLock):
+class StateLockEntity(TemplateEntity, AbstractTemplateLock):
     """Representation of a template lock."""
 
     _attr_should_poll = False
@@ -323,7 +286,7 @@ class TemplateLock(TemplateEntity, AbstractTemplateLock):
         unique_id: str | None,
     ) -> None:
         """Initialize the lock."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateLock.__init__(self, config)
         name = self._attr_name
         if TYPE_CHECKING:
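The per-platform boilerplate removed above, the `_async_create_template_tracking_entities` helper, the legacy-YAML rewrite, and the trigger-coordinator branch, is consolidated into a single `async_setup_template_platform` call imported from `.helpers`. The helper's implementation is not part of this diff; the sketch below is only a rough, self-contained reconstruction of the control flow it replaces, based on the removed code and the call sites visible here. The function name, signature, and string keys are assumptions, not the real helper.

# Minimal sketch of the dispatch the shared helper presumably centralizes; all names
# and the "unique_id"/"entities"/"coordinator" keys mirror the removed code above.
from collections.abc import Callable, Iterable
from typing import Any


def _setup_template_platform_sketch(
    make_state_entity: Callable[[dict[str, Any], str | None], Any],
    make_trigger_entity: Callable[[Any, dict[str, Any]], Any],
    add_entities: Callable[[Iterable[Any]], None],
    discovery_info: dict[str, Any] | None,
    legacy_configs: list[dict[str, Any]],
) -> None:
    """Dispatch: legacy platform YAML, trigger coordinator, or modern template config."""
    if discovery_info is None:
        # Legacy platform YAML: configs are assumed to be already rewritten to the
        # modern shape (see the LEGACY_FIELDS mappings in the platform modules).
        add_entities(
            make_state_entity(conf, conf.get("unique_id")) for conf in legacy_configs
        )
        return

    if "coordinator" in discovery_info:
        # Trigger-based template entities are driven by a shared TriggerUpdateCoordinator.
        add_entities(
            make_trigger_entity(discovery_info["coordinator"], conf)
            for conf in discovery_info["entities"]
        )
        return

    # Modern `template:` config: state-based entities; a per-entity unique_id is
    # prefixed with the parent's unique_id, as the removed helpers did.
    prefix = discovery_info["unique_id"]
    entities = []
    for conf in discovery_info["entities"]:
        unique_id = conf.get("unique_id")
        if unique_id and prefix:
            unique_id = f"{prefix}-{unique_id}"
        entities.append(make_state_entity(conf, unique_id))
    add_entities(entities)

Centralizing these three branches is why every platform's `async_setup_platform` in this commit shrinks to one `await async_setup_template_platform(...)` call.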
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import voluptuous as vol
 
@@ -13,6 +13,7 @@ from homeassistant.components.number import (
     DEFAULT_MIN_VALUE,
     DEFAULT_STEP,
     DOMAIN as NUMBER_DOMAIN,
+    ENTITY_ID_FORMAT,
     NumberEntity,
 )
 from homeassistant.config_entries import ConfigEntry
@@ -21,12 +22,10 @@ from homeassistant.const import (
     CONF_NAME,
     CONF_OPTIMISTIC,
     CONF_STATE,
-    CONF_UNIQUE_ID,
     CONF_UNIT_OF_MEASUREMENT,
 )
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, selector
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -35,6 +34,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import TriggerUpdateCoordinator
 from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN
+from .helpers import async_setup_template_platform
 from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
 from .trigger_entity import TriggerEntity
 
@@ -70,19 +70,6 @@ NUMBER_CONFIG_SCHEMA = vol.Schema(
 )
 
 
-async def _async_create_entities(
-    hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
-) -> list[TemplateNumber]:
-    """Create the Template number."""
-    entities = []
-    for definition in definitions:
-        unique_id = definition.get(CONF_UNIQUE_ID)
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-        entities.append(TemplateNumber(hass, definition, unique_id))
-    return entities
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -90,23 +77,14 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template number."""
-    if discovery_info is None:
-        _LOGGER.warning(
-            "Template number entities can only be configured under template:"
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerNumberEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    async_add_entities(
-        await _async_create_entities(
-            hass, discovery_info["entities"], discovery_info["unique_id"]
-        )
+    await async_setup_template_platform(
+        hass,
+        NUMBER_DOMAIN,
+        config,
+        StateNumberEntity,
+        TriggerNumberEntity,
+        async_add_entities,
+        discovery_info,
     )
 
 
@@ -119,22 +97,25 @@ async def async_setup_entry(
     _options = dict(config_entry.options)
     _options.pop("template_type")
     validated_config = NUMBER_CONFIG_SCHEMA(_options)
-    async_add_entities([TemplateNumber(hass, validated_config, config_entry.entry_id)])
+    async_add_entities(
+        [StateNumberEntity(hass, validated_config, config_entry.entry_id)]
+    )
 
 
 @callback
 def async_create_preview_number(
     hass: HomeAssistant, name: str, config: dict[str, Any]
-) -> TemplateNumber:
+) -> StateNumberEntity:
     """Create a preview number."""
     validated_config = NUMBER_CONFIG_SCHEMA(config | {CONF_NAME: name})
-    return TemplateNumber(hass, validated_config, None)
+    return StateNumberEntity(hass, validated_config, None)
 
 
-class TemplateNumber(TemplateEntity, NumberEntity):
+class StateNumberEntity(TemplateEntity, NumberEntity):
     """Representation of a template number."""
 
     _attr_should_poll = False
+    _entity_id_format = ENTITY_ID_FORMAT
 
     def __init__(
         self,
@@ -143,8 +124,10 @@ class TemplateNumber(TemplateEntity, NumberEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the number."""
-        super().__init__(hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
+        if TYPE_CHECKING:
             assert self._attr_name is not None
 
         self._value_template = config[CONF_STATE]
         self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], self._attr_name, DOMAIN)
@@ -156,10 +139,6 @@ class TemplateNumber(TemplateEntity, NumberEntity):
         self._attr_native_step = DEFAULT_STEP
         self._attr_native_min_value = DEFAULT_MIN_VALUE
         self._attr_native_max_value = DEFAULT_MAX_VALUE
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
 
     @callback
     def _async_setup_templates(self) -> None:
@@ -208,6 +187,7 @@ class TemplateNumber(TemplateEntity, NumberEntity):
 class TriggerNumberEntity(TriggerEntity, NumberEntity):
     """Number entity based on trigger data."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
     domain = NUMBER_DOMAIN
     extra_template_keys = (
         CONF_STATE,
@@ -11,19 +11,13 @@ from homeassistant.components.select import (
     ATTR_OPTION,
     ATTR_OPTIONS,
     DOMAIN as SELECT_DOMAIN,
+    ENTITY_ID_FORMAT,
     SelectEntity,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    CONF_DEVICE_ID,
-    CONF_NAME,
-    CONF_OPTIMISTIC,
-    CONF_STATE,
-    CONF_UNIQUE_ID,
-)
+from homeassistant.const import CONF_DEVICE_ID, CONF_NAME, CONF_OPTIMISTIC, CONF_STATE
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, selector
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -33,6 +27,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from . import TriggerUpdateCoordinator
 from .const import DOMAIN
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
 from .trigger_entity import TriggerEntity
 
@@ -65,19 +60,6 @@ SELECT_CONFIG_SCHEMA = vol.Schema(
 )
 
 
-async def _async_create_entities(
-    hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
-) -> list[TemplateSelect]:
-    """Create the Template select."""
-    entities = []
-    for definition in definitions:
-        unique_id = definition.get(CONF_UNIQUE_ID)
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-        entities.append(TemplateSelect(hass, definition, unique_id))
-    return entities
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -85,23 +67,14 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template select."""
-    if discovery_info is None:
-        _LOGGER.warning(
-            "Template select entities can only be configured under template:"
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerSelectEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    async_add_entities(
-        await _async_create_entities(
-            hass, discovery_info["entities"], discovery_info["unique_id"]
-        )
+    await async_setup_template_platform(
+        hass,
+        SELECT_DOMAIN,
+        config,
+        TemplateSelect,
+        TriggerSelectEntity,
+        async_add_entities,
+        discovery_info,
     )
 
 
@@ -120,6 +93,8 @@ async def async_setup_entry(
 class AbstractTemplateSelect(AbstractTemplateEntity, SelectEntity):
     """Representation of a template select features."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
+
     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -159,7 +134,7 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
         unique_id: str | None,
     ) -> None:
         """Initialize the select."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateSelect.__init__(self, config)
 
         name = self._attr_name
@@ -169,11 +144,6 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
         if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
             self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
 
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
-
     @callback
     def _async_setup_templates(self) -> None:
         """Set up templates."""
@@ -44,8 +44,6 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, selector, template
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -55,17 +53,13 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util
 
 from . import TriggerUpdateCoordinator
-from .const import CONF_ATTRIBUTE_TEMPLATES, CONF_AVAILABILITY_TEMPLATE, CONF_OBJECT_ID
-from .template_entity import (
-    TEMPLATE_ENTITY_COMMON_SCHEMA,
-    TemplateEntity,
-    rewrite_common_legacy_to_modern_conf,
-)
+from .const import CONF_ATTRIBUTE_TEMPLATES, CONF_AVAILABILITY_TEMPLATE
+from .helpers import async_setup_template_platform
+from .template_entity import TEMPLATE_ENTITY_COMMON_SCHEMA, TemplateEntity
 from .trigger_entity import TriggerEntity
 
 LEGACY_FIELDS = {
     CONF_FRIENDLY_NAME_TEMPLATE: CONF_NAME,
-    CONF_FRIENDLY_NAME: CONF_NAME,
     CONF_VALUE_TEMPLATE: CONF_STATE,
 }
 
@@ -142,27 +136,6 @@ def extra_validation_checks(val):
     return val
 
 
-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, cfg: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy sensor definitions to modern ones."""
-    sensors = []
-
-    for object_id, entity_cfg in cfg.items():
-        entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id}
-
-        entity_cfg = rewrite_common_legacy_to_modern_conf(
-            hass, entity_cfg, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_cfg:
-            entity_cfg[CONF_NAME] = template.Template(object_id, hass)
-
-        sensors.append(entity_cfg)
-
-    return sensors
-
-
 PLATFORM_SCHEMA = vol.All(
     SENSOR_PLATFORM_SCHEMA.extend(
         {
@@ -177,33 +150,6 @@ PLATFORM_SCHEMA = vol.All(
 _LOGGER = logging.getLogger(__name__)
 
 
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback | AddConfigEntryEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template sensors."""
-    sensors = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        sensors.append(
-            SensorTemplate(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(sensors)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -211,27 +157,16 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template sensors."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerSensorEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
+    await async_setup_template_platform(
+        hass,
+        SENSOR_DOMAIN,
+        config,
+        StateSensorEntity,
+        TriggerSensorEntity,
         async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_SENSORS,
     )
 
 
@@ -244,22 +179,25 @@ async def async_setup_entry(
     _options = dict(config_entry.options)
     _options.pop("template_type")
     validated_config = SENSOR_CONFIG_SCHEMA(_options)
-    async_add_entities([SensorTemplate(hass, validated_config, config_entry.entry_id)])
+    async_add_entities(
+        [StateSensorEntity(hass, validated_config, config_entry.entry_id)]
+    )
 
 
 @callback
 def async_create_preview_sensor(
     hass: HomeAssistant, name: str, config: dict[str, Any]
-) -> SensorTemplate:
+) -> StateSensorEntity:
     """Create a preview sensor."""
     validated_config = SENSOR_CONFIG_SCHEMA(config | {CONF_NAME: name})
-    return SensorTemplate(hass, validated_config, None)
+    return StateSensorEntity(hass, validated_config, None)
 
 
-class SensorTemplate(TemplateEntity, SensorEntity):
+class StateSensorEntity(TemplateEntity, SensorEntity):
     """Representation of a Template Sensor."""
 
     _attr_should_poll = False
+    _entity_id_format = ENTITY_ID_FORMAT
 
     def __init__(
         self,
@@ -268,7 +206,7 @@ class SensorTemplate(TemplateEntity, SensorEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the sensor."""
-        super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id)
+        super().__init__(hass, config, unique_id)
         self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
         self._attr_device_class = config.get(CONF_DEVICE_CLASS)
         self._attr_state_class = config.get(CONF_STATE_CLASS)
@@ -276,14 +214,6 @@ class SensorTemplate(TemplateEntity, SensorEntity):
         self._attr_last_reset_template: template.Template | None = config.get(
             ATTR_LAST_RESET
         )
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
 
     @callback
     def _async_setup_templates(self) -> None:
@@ -327,6 +257,7 @@ class SensorTemplate(TemplateEntity, SensorEntity):
 class TriggerSensorEntity(TriggerEntity, RestoreSensor):
     """Sensor entity based on trigger data."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
     domain = SENSOR_DOMAIN
     extra_template_keys = (CONF_STATE,)
 
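The sensor platform passes `LEGACY_FIELDS` plus `legacy_key=CONF_SENSORS`, so the shared helper now performs the rewrite that the removed `rewrite_legacy_to_modern_conf` did: turning the legacy `sensors:` mapping keyed by object_id into a list of modern entity configs. Below is a rough illustration of that mapping; the field names come from the `LEGACY_FIELDS` mapping above, while the plain string keys and values stand in for the `CONF_*` constants and `template.Template` objects so the example stays self-contained.

# Illustration only: the shape of the legacy -> modern rewrite for one `sensors:` entry.
legacy = {
    "outdoor_temp": {
        "friendly_name_template": "{{ 'Outdoor temperature' }}",
        "value_template": "{{ states('sensor.raw_temp') | float }}",
    }
}

modern = []
for object_id, conf in legacy.items():
    conf = dict(conf)
    # CONF_FRIENDLY_NAME_TEMPLATE -> CONF_NAME; fall back to the object_id as the name.
    conf["name"] = conf.pop("friendly_name_template", object_id)
    # CONF_VALUE_TEMPLATE -> CONF_STATE.
    conf["state"] = conf.pop("value_template")
    # The object_id is preserved so the entity_id can still be derived from it.
    conf["object_id"] = object_id
    modern.append(conf)

assert modern[0]["state"].startswith("{{")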
@@ -30,8 +30,6 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, selector, template
-from homeassistant.helpers.device import async_device_info_to_link_from_device_id
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
@@ -40,19 +38,18 @@ from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import TriggerUpdateCoordinator
-from .const import CONF_OBJECT_ID, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
+from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity
 
 _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
 
-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_VALUE_TEMPLATE: CONF_STATE,
 }
 
@@ -96,27 +93,6 @@ SWITCH_CONFIG_SCHEMA = vol.Schema(
 )
 
 
-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy switch configuration definitions to modern ones."""
-    switches = []
-
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        switches.append(entity_conf)
-
-    return switches
-
-
 def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]:
     """Rewrite option configuration to modern configuration."""
     option_config = {**option_config}
@@ -127,33 +103,6 @@ def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str,
     return option_config
 
 
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template switches."""
-    switches = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        switches.append(
-            SwitchTemplate(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(switches)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -161,27 +110,16 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the template switches."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_SWITCHES]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerSwitchEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
+    await async_setup_template_platform(
+        hass,
+        SWITCH_DOMAIN,
+        config,
+        StateSwitchEntity,
+        TriggerSwitchEntity,
        async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_SWITCHES,
     )
 
 
@@ -195,23 +133,26 @@ async def async_setup_entry(
     _options.pop("template_type")
     _options = rewrite_options_to_modern_conf(_options)
     validated_config = SWITCH_CONFIG_SCHEMA(_options)
-    async_add_entities([SwitchTemplate(hass, validated_config, config_entry.entry_id)])
+    async_add_entities(
+        [StateSwitchEntity(hass, validated_config, config_entry.entry_id)]
+    )
 
 
 @callback
 def async_create_preview_switch(
     hass: HomeAssistant, name: str, config: dict[str, Any]
-) -> SwitchTemplate:
+) -> StateSwitchEntity:
     """Create a preview switch."""
     updated_config = rewrite_options_to_modern_conf(config)
     validated_config = SWITCH_CONFIG_SCHEMA(updated_config | {CONF_NAME: name})
-    return SwitchTemplate(hass, validated_config, None)
+    return StateSwitchEntity(hass, validated_config, None)
 
 
-class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
+class StateSwitchEntity(TemplateEntity, SwitchEntity, RestoreEntity):
     """Representation of a Template switch."""
 
     _attr_should_poll = False
+    _entity_id_format = ENTITY_ID_FORMAT
 
     def __init__(
         self,
@@ -220,11 +161,8 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the Template switch."""
-        super().__init__(hass, config=config, unique_id=unique_id)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
+        super().__init__(hass, config, unique_id)
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
@@ -238,10 +176,6 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
 
         self._state: bool | None = False
         self._attr_assumed_state = self._template is None
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
 
     @callback
     def _update_state(self, result):
@@ -304,6 +238,7 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
 class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
     """Switch entity based on trigger data."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
     domain = SWITCH_DOMAIN
 
     def __init__(
@@ -314,6 +249,7 @@ class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
     ) -> None:
         """Initialize the entity."""
         super().__init__(hass, coordinator, config)
+
         name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
         self._template = config.get(CONF_STATE)
         if on_action := config.get(CONF_TURN_ON):
@@ -326,11 +262,6 @@ class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
             self._to_render_simple.append(CONF_STATE)
             self._parse_result.add(CONF_STATE)
 
-        self._attr_device_info = async_device_info_to_link_from_device_id(
-            hass,
-            config.get(CONF_DEVICE_ID),
-        )
-
     async def async_added_to_hass(self) -> None:
         """Restore last state."""
         await super().async_added_to_hass()
@@ -4,7 +4,6 @@ from __future__ import annotations
 
 from collections.abc import Callable, Mapping
 import contextlib
-import itertools
 import logging
 from typing import Any, cast
 
@@ -14,7 +13,6 @@ import voluptuous as vol
 from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
 from homeassistant.const import (
     CONF_ENTITY_PICTURE_TEMPLATE,
-    CONF_FRIENDLY_NAME,
     CONF_ICON,
     CONF_ICON_TEMPLATE,
     CONF_NAME,
@@ -137,42 +135,6 @@ TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY = vol.Schema(
 ).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema)
 
 
-LEGACY_FIELDS = {
-    CONF_ICON_TEMPLATE: CONF_ICON,
-    CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
-    CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
-    CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
-    CONF_FRIENDLY_NAME: CONF_NAME,
-}
-
-
-def rewrite_common_legacy_to_modern_conf(
-    hass: HomeAssistant,
-    entity_cfg: dict[str, Any],
-    extra_legacy_fields: dict[str, str] | None = None,
-) -> dict[str, Any]:
-    """Rewrite legacy config."""
-    entity_cfg = {**entity_cfg}
-    if extra_legacy_fields is None:
-        extra_legacy_fields = {}
-
-    for from_key, to_key in itertools.chain(
-        LEGACY_FIELDS.items(), extra_legacy_fields.items()
-    ):
-        if from_key not in entity_cfg or to_key in entity_cfg:
-            continue
-
-        val = entity_cfg.pop(from_key)
-        if isinstance(val, str):
-            val = Template(val, hass)
-        entity_cfg[to_key] = val
-
-    if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str):
-        entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME], hass)
-
-    return entity_cfg
-
-
 class _TemplateAttribute:
     """Attribute value linked to template result."""
 
@@ -278,17 +240,11 @@ class TemplateEntity(AbstractTemplateEntity):
     def __init__(
         self,
         hass: HomeAssistant,
-        *,
-        availability_template: Template | None = None,
-        icon_template: Template | None = None,
-        entity_picture_template: Template | None = None,
-        attribute_templates: dict[str, Template] | None = None,
-        config: ConfigType | None = None,
-        fallback_name: str | None = None,
-        unique_id: str | None = None,
+        config: ConfigType,
+        unique_id: str | None,
     ) -> None:
         """Template Entity."""
-        AbstractTemplateEntity.__init__(self, hass)
+        AbstractTemplateEntity.__init__(self, hass, config)
         self._template_attrs: dict[Template, list[_TemplateAttribute]] = {}
         self._template_result_info: TrackTemplateResultInfo | None = None
         self._attr_extra_state_attributes = {}
@@ -307,15 +263,6 @@ class TemplateEntity(AbstractTemplateEntity):
             | None
         ) = None
         self._run_variables: ScriptVariables | dict
-        if config is None:
-            self._attribute_templates = attribute_templates
-            self._availability_template = availability_template
-            self._icon_template = icon_template
-            self._entity_picture_template = entity_picture_template
-            self._friendly_name_template = None
-            self._run_variables = {}
-            self._blueprint_inputs = None
-        else:
         self._attribute_templates = config.get(CONF_ATTRIBUTES)
         self._availability_template = config.get(CONF_AVAILABILITY)
         self._icon_template = config.get(CONF_ICON)
@@ -340,7 +287,7 @@ class TemplateEntity(AbstractTemplateEntity):
         variables = {"this": DummyState()}
 
         # Try to render the name as it can influence the entity ID
-        self._attr_name = fallback_name
+        self._attr_name = None
         if self._friendly_name_template:
             with contextlib.suppress(TemplateError):
                 self._attr_name = self._friendly_name_template.async_render(
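`TemplateEntity.__init__` loses its keyword-only template arguments and `fallback_name`; the validated config and the unique_id are now required and passed positionally, and the config is forwarded to `AbstractTemplateEntity`. A minimal hypothetical subclass showing the new calling convention follows; `MyTemplateEntity` and the `"state"` key are illustrative and not part of this diff, and the import path is assumed from the module layout.

# Hypothetical subclass; assumes TemplateEntity comes from the module patched above,
# i.e. homeassistant.components.template.template_entity.
from homeassistant.components.template.template_entity import TemplateEntity


class MyTemplateEntity(TemplateEntity):
    """Minimal example of the new positional constructor convention."""

    def __init__(self, hass, config, unique_id):
        # Old style: super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id)
        # New style: the validated config is always required and passed positionally.
        TemplateEntity.__init__(self, hass, config, unique_id)
        self._state_template = config.get("state")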
@@ -30,7 +30,7 @@ class TriggerEntity(  # pylint: disable=hass-enforce-class-module
         """Initialize the entity."""
         CoordinatorEntity.__init__(self, coordinator)
         TriggerBaseEntity.__init__(self, hass, config)
-        AbstractTemplateEntity.__init__(self, hass)
+        AbstractTemplateEntity.__init__(self, hass, config)
 
         self._state_render_error = False
 
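With `AbstractTemplateEntity.__init__` now receiving the config and each platform class declaring `_entity_id_format`, the per-platform `async_generate_entity_id(ENTITY_ID_FORMAT, object_id, hass=hass)` blocks removed in light, sensor, switch, and vacuum can presumably be handled once in the base class. The base-class body is not part of this diff, so the following is only a sketch of how that could look, mirroring the removed code; the class name and the literal `"object_id"` key (standing in for `CONF_OBJECT_ID`) are assumptions.

# Sketch only: deriving entity_id centrally from _entity_id_format and the config.
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import async_generate_entity_id


class AbstractTemplateEntitySketch:
    # Set by each platform class in this commit, e.g. _entity_id_format = ENTITY_ID_FORMAT.
    _entity_id_format: str

    def __init__(self, hass: HomeAssistant, config: dict) -> None:
        self.hass = hass
        # Mirrors the per-platform blocks removed above; "object_id" stands in for
        # CONF_OBJECT_ID.
        if (object_id := config.get("object_id")) is not None:
            self.entity_id = async_generate_entity_id(
                self._entity_id_format, object_id, hass=hass
            )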
@@ -34,20 +34,18 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, template
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from .const import CONF_OBJECT_ID, DOMAIN
+from .const import DOMAIN
 from .coordinator import TriggerUpdateCoordinator
 from .entity import AbstractTemplateEntity
+from .helpers import async_setup_template_platform
 from .template_entity import (
-    LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
     TEMPLATE_ENTITY_ATTRIBUTES_SCHEMA_LEGACY,
     TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
     TemplateEntity,
     make_template_entity_common_modern_attributes_schema,
-    rewrite_common_legacy_to_modern_conf,
 )
 from .trigger_entity import TriggerEntity
 
@@ -72,7 +70,7 @@ _VALID_STATES = [
     VacuumActivity.ERROR,
 ]
 
-LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
+LEGACY_FIELDS = {
     CONF_BATTERY_LEVEL_TEMPLATE: CONF_BATTERY_LEVEL,
     CONF_FAN_SPEED_TEMPLATE: CONF_FAN_SPEED,
     CONF_VALUE_TEMPLATE: CONF_STATE,
@@ -125,88 +123,31 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
 )
 
 
-def rewrite_legacy_to_modern_conf(
-    hass: HomeAssistant, config: dict[str, dict]
-) -> list[dict]:
-    """Rewrite legacy switch configuration definitions to modern ones."""
-    vacuums = []
-
-    for object_id, entity_conf in config.items():
-        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
-
-        entity_conf = rewrite_common_legacy_to_modern_conf(
-            hass, entity_conf, LEGACY_FIELDS
-        )
-
-        if CONF_NAME not in entity_conf:
-            entity_conf[CONF_NAME] = template.Template(object_id, hass)
-
-        vacuums.append(entity_conf)
-
-    return vacuums
-
-
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the template switches."""
-    vacuums = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        vacuums.append(
-            TemplateVacuum(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(vacuums)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
     async_add_entities: AddEntitiesCallback,
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
-    """Set up the Template cover."""
-    if discovery_info is None:
-        _async_create_template_tracking_entities(
-            async_add_entities,
-            hass,
-            rewrite_legacy_to_modern_conf(hass, config[CONF_VACUUMS]),
-            None,
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerVacuumEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
+    """Set up the Template vacuum."""
+    await async_setup_template_platform(
+        hass,
+        VACUUM_DOMAIN,
+        config,
+        TemplateStateVacuumEntity,
+        TriggerVacuumEntity,
         async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+        discovery_info,
+        LEGACY_FIELDS,
+        legacy_key=CONF_VACUUMS,
     )
 
 
 class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
     """Representation of a template vacuum features."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
+
     # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
     # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
     def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -350,7 +291,7 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
         self._attr_fan_speed = None
 
 
-class TemplateVacuum(TemplateEntity, AbstractTemplateVacuum):
+class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
     """A template vacuum component."""
 
     _attr_should_poll = False
@@ -362,12 +303,8 @@ class TemplateVacuum(TemplateEntity, AbstractTemplateVacuum):
         unique_id,
     ) -> None:
         """Initialize the vacuum."""
-        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
+        TemplateEntity.__init__(self, hass, config, unique_id)
         AbstractTemplateVacuum.__init__(self, config)
-        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
-            self.entity_id = async_generate_entity_id(
-                ENTITY_ID_FORMAT, object_id, hass=hass
-            )
         name = self._attr_name
         if TYPE_CHECKING:
             assert name is not None
@@ -31,16 +31,10 @@ from homeassistant.components.weather import (
     WeatherEntity,
     WeatherEntityFeature,
 )
-from homeassistant.const import (
-    CONF_TEMPERATURE_UNIT,
-    CONF_UNIQUE_ID,
-    STATE_UNAVAILABLE,
-    STATE_UNKNOWN,
-)
+from homeassistant.const import CONF_TEMPERATURE_UNIT, STATE_UNAVAILABLE, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import TemplateError
 from homeassistant.helpers import config_validation as cv, template
-from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
@@ -52,11 +46,8 @@ from homeassistant.util.unit_conversion import (
 )
 
 from .coordinator import TriggerUpdateCoordinator
-from .template_entity import (
-    TemplateEntity,
-    make_template_entity_common_modern_schema,
-    rewrite_common_legacy_to_modern_conf,
-)
+from .helpers import async_setup_template_platform
+from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
 from .trigger_entity import TriggerEntity
 
 CHECK_FORECAST_KEYS = (
@@ -138,33 +129,6 @@ WEATHER_SCHEMA = vol.Schema(
 PLATFORM_SCHEMA = WEATHER_PLATFORM_SCHEMA.extend(WEATHER_SCHEMA.schema)
 
 
-@callback
-def _async_create_template_tracking_entities(
-    async_add_entities: AddEntitiesCallback,
-    hass: HomeAssistant,
-    definitions: list[dict],
-    unique_id_prefix: str | None,
-) -> None:
-    """Create the weather entities."""
-    entities = []
-
-    for entity_conf in definitions:
-        unique_id = entity_conf.get(CONF_UNIQUE_ID)
-
-        if unique_id and unique_id_prefix:
-            unique_id = f"{unique_id_prefix}-{unique_id}"
-
-        entities.append(
-            WeatherTemplate(
-                hass,
-                entity_conf,
-                unique_id,
-            )
-        )
-
-    async_add_entities(entities)
-
-
 async def async_setup_platform(
     hass: HomeAssistant,
     config: ConfigType,
@@ -172,39 +136,23 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the Template weather."""
-    if discovery_info is None:
-        config = rewrite_common_legacy_to_modern_conf(hass, config)
-        unique_id = config.get(CONF_UNIQUE_ID)
-        async_add_entities(
-            [
-                WeatherTemplate(
-                    hass,
-                    config,
-                    unique_id,
-                )
-            ]
-        )
-        return
-
-    if "coordinator" in discovery_info:
-        async_add_entities(
-            TriggerWeatherEntity(hass, discovery_info["coordinator"], config)
-            for config in discovery_info["entities"]
-        )
-        return
-
-    _async_create_template_tracking_entities(
+    await async_setup_template_platform(
+        hass,
+        WEATHER_DOMAIN,
+        config,
+        StateWeatherEntity,
+        TriggerWeatherEntity,
         async_add_entities,
-        hass,
-        discovery_info["entities"],
-        discovery_info["unique_id"],
+        discovery_info,
+        {},
     )
 
 
-class WeatherTemplate(TemplateEntity, WeatherEntity):
+class StateWeatherEntity(TemplateEntity, WeatherEntity):
     """Representation of a weather condition."""
 
     _attr_should_poll = False
+    _entity_id_format = ENTITY_ID_FORMAT
 
     def __init__(
         self,
@@ -213,9 +161,8 @@ class WeatherTemplate(TemplateEntity, WeatherEntity):
         unique_id: str | None,
     ) -> None:
         """Initialize the Template weather."""
-        super().__init__(hass, config=config, unique_id=unique_id)
+        super().__init__(hass, config, unique_id)
 
-        name = self._attr_name
         self._condition_template = config[CONF_CONDITION_TEMPLATE]
         self._temperature_template = config[CONF_TEMPERATURE_TEMPLATE]
         self._humidity_template = config[CONF_HUMIDITY_TEMPLATE]
@@ -243,8 +190,6 @@ class WeatherTemplate(TemplateEntity, WeatherEntity):
         self._attr_native_visibility_unit = config.get(CONF_VISIBILITY_UNIT)
         self._attr_native_wind_speed_unit = config.get(CONF_WIND_SPEED_UNIT)
 
-        self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, name, hass=hass)
-
         self._condition = None
         self._temperature = None
         self._humidity = None
@@ -538,6 +483,7 @@ class WeatherExtraStoredData(ExtraStoredData):
 class TriggerWeatherEntity(TriggerEntity, WeatherEntity, RestoreEntity):
     """Sensor entity based on trigger data."""
 
+    _entity_id_format = ENTITY_ID_FORMAT
     domain = WEATHER_DOMAIN
     extra_template_keys = (
         CONF_CONDITION_TEMPLATE,
@@ -553,6 +499,7 @@ class TriggerWeatherEntity(TriggerEntity, WeatherEntity, RestoreEntity):
     ) -> None:
         """Initialize."""
         super().__init__(hass, coordinator, config)
+
         self._attr_native_precipitation_unit = config.get(CONF_PRECIPITATION_UNIT)
         self._attr_native_pressure_unit = config.get(CONF_PRESSURE_UNIT)
         self._attr_native_temperature_unit = config.get(CONF_TEMPERATURE_UNIT)
|
self._attr_native_temperature_unit = config.get(CONF_TEMPERATURE_UNIT)
|
||||||
|
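Note: the refactor above funnels all three template-weather setup paths (plain YAML, trigger-based discovery, state-based discovery) through the shared helper called in the hunk. The snippet below is a hypothetical sketch of what such a dispatcher can look like, using the same argument order as the call above; it is not the actual homeassistant/components/template/helpers.py implementation, and the legacy-field rewriting step is omitted.

from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType


async def async_setup_template_platform_sketch(
    hass: HomeAssistant,
    domain: str,
    config: ConfigType,
    state_entity_cls: type,
    trigger_entity_cls: type,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None,
    legacy_fields: dict,
) -> None:
    """Dispatch YAML, trigger-based and state-based template platform setup."""
    if discovery_info is None:
        # Plain YAML platform config: a single state-based entity.
        async_add_entities([state_entity_cls(hass, config, config.get("unique_id"))])
        return
    if "coordinator" in discovery_info:
        # Trigger-based template entities share a TriggerUpdateCoordinator.
        async_add_entities(
            trigger_entity_cls(hass, discovery_info["coordinator"], entity_conf)
            for entity_conf in discovery_info["entities"]
        )
        return
    # Discovery from the template integration: state-based entities.
    async_add_entities(
        state_entity_cls(hass, entity_conf, entity_conf.get("unique_id"))
        for entity_conf in discovery_info["entities"]
    )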
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
   "iot_class": "cloud_polling",
   "loggers": ["tesla-fleet-api"],
-  "requirements": ["tesla-fleet-api==1.2.0"]
+  "requirements": ["tesla-fleet-api==1.2.2"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/teslemetry",
   "iot_class": "cloud_polling",
   "loggers": ["tesla-fleet-api"],
-  "requirements": ["tesla-fleet-api==1.2.0", "teslemetry-stream==0.7.9"]
+  "requirements": ["tesla-fleet-api==1.2.2", "teslemetry-stream==0.7.9"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/tessie",
   "iot_class": "cloud_polling",
   "loggers": ["tessie", "tesla-fleet-api"],
-  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.0"]
+  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.2"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/touchline_sl",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["pytouchlinesl==0.3.0"]
+  "requirements": ["pytouchlinesl==0.4.0"]
 }
@@ -2,7 +2,6 @@
 
 from __future__ import annotations
 
-from collections.abc import Callable
 from dataclasses import dataclass, field
 from enum import StrEnum
 import logging
@@ -417,8 +416,6 @@ class UnitOfMeasurement:
     device_classes: set[str]
 
     aliases: set[str] = field(default_factory=set)
-    conversion_unit: str | None = None
-    conversion_fn: Callable[[float], float] | None = None
 
 
 # A tuple of available units of measurements we can work with.
@@ -458,8 +455,6 @@ UNITS = (
             SensorDeviceClass.CO,
             SensorDeviceClass.CO2,
         },
-        conversion_unit=CONCENTRATION_PARTS_PER_MILLION,
-        conversion_fn=lambda x: x / 1000,
     ),
     UnitOfMeasurement(
         unit=UnitOfElectricCurrent.AMPERE,
@@ -470,8 +465,6 @@ UNITS = (
         unit=UnitOfElectricCurrent.MILLIAMPERE,
         aliases={"ma", "milliampere"},
         device_classes={SensorDeviceClass.CURRENT},
-        conversion_unit=UnitOfElectricCurrent.AMPERE,
-        conversion_fn=lambda x: x / 1000,
     ),
     UnitOfMeasurement(
         unit=UnitOfEnergy.WATT_HOUR,
@@ -527,8 +520,6 @@ UNITS = (
             SensorDeviceClass.SULPHUR_DIOXIDE,
             SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
         },
-        conversion_unit=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
-        conversion_fn=lambda x: x * 1000,
     ),
     UnitOfMeasurement(
         unit=UnitOfPower.WATT,
@@ -596,8 +587,6 @@ UNITS = (
         unit=UnitOfElectricPotential.MILLIVOLT,
         aliases={"mv", "millivolt"},
         device_classes={SensorDeviceClass.VOLTAGE},
-        conversion_unit=UnitOfElectricPotential.VOLT,
-        conversion_fn=lambda x: x / 1000,
     ),
 )
 
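The conversion_unit/conversion_fn fields above are dropped because the scaling is now delegated to Home Assistant's own unit conversion, driven by the suggested_unit_of_measurement values added in the sensor hunks further down. Illustration only (values made up, not part of the diff), showing the core converter doing the same mA-to-A scaling that conversion_fn=lambda x: x / 1000 used to do by hand:

from homeassistant.const import UnitOfElectricCurrent
from homeassistant.util.unit_conversion import ElectricCurrentConverter

value_a = ElectricCurrentConverter.convert(
    1500.0, UnitOfElectricCurrent.MILLIAMPERE, UnitOfElectricCurrent.AMPERE
)
print(value_a)  # 1.5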
@@ -382,20 +382,18 @@ class TuyaNumberEntity(TuyaEntity, NumberEntity):
             return
 
         uoms = DEVICE_CLASS_UNITS[self.device_class]
-        self._uom = uoms.get(self.native_unit_of_measurement) or uoms.get(
+        uom = uoms.get(self.native_unit_of_measurement) or uoms.get(
             self.native_unit_of_measurement.lower()
         )
 
         # Unknown unit of measurement, device class should not be used.
-        if self._uom is None:
+        if uom is None:
             self._attr_device_class = None
             return
 
         # Found unit of measurement, use the standardized Unit
         # Use the target conversion unit (if set)
-        self._attr_native_unit_of_measurement = (
-            self._uom.conversion_unit or self._uom.unit
-        )
+        self._attr_native_unit_of_measurement = uom.unit
 
     @property
     def native_value(self) -> float | None:
@@ -14,6 +14,8 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.const import (
+    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+    CONCENTRATION_PARTS_PER_MILLION,
     PERCENTAGE,
     EntityCategory,
     UnitOfElectricCurrent,
@@ -98,6 +100,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -112,6 +115,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_registry_enabled_default=False,
         ),
     ),
@@ -164,6 +168,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CH2O_VALUE,
@@ -181,6 +186,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        ),
         *BATTERY_SENSORS,
     ),
@@ -192,6 +198,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_monoxide",
             device_class=SensorDeviceClass.CO,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         *BATTERY_SENSORS,
     ),
@@ -278,18 +285,21 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CO_VALUE,
             translation_key="carbon_monoxide",
             device_class=SensorDeviceClass.CO,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CO2_VALUE,
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CH2O_VALUE,
@@ -418,6 +428,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -432,6 +443,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_registry_enabled_default=False,
         ),
     ),
@@ -472,6 +484,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CH2O_VALUE,
@@ -489,12 +502,14 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.PM10,
             translation_key="pm10",
             device_class=SensorDeviceClass.PM10,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         *BATTERY_SENSORS,
     ),
@@ -506,6 +521,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.VOC_VALUE,
@@ -518,6 +534,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.VA_HUMIDITY,
@@ -583,6 +600,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -597,6 +615,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_registry_enabled_default=False,
         ),
     ),
@@ -613,6 +632,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.TEMP,
@@ -637,6 +657,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="concentration_carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.TOTAL_TIME,
@@ -685,6 +706,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         *BATTERY_SENSORS,
     ),
@@ -724,6 +746,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CH2O_VALUE,
@@ -747,6 +770,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.HUMIDITY_VALUE,
@@ -759,12 +783,14 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="pm1",
             device_class=SensorDeviceClass.PM1,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.PM10,
             translation_key="pm10",
             device_class=SensorDeviceClass.PM10,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         *BATTERY_SENSORS,
     ),
@@ -945,6 +971,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -959,6 +986,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -1004,12 +1032,14 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="carbon_dioxide",
             device_class=SensorDeviceClass.CO2,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.PM25_VALUE,
             translation_key="pm25",
             device_class=SensorDeviceClass.PM25,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         ),
         TuyaSensorEntityDescription(
             key=DPCode.CH2O_VALUE,
@@ -1057,6 +1087,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_registry_enabled_default=False,
         ),
         TuyaSensorEntityDescription(
@@ -1071,6 +1102,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_registry_enabled_default=False,
         ),
     ),
@@ -1097,6 +1129,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="current",
             device_class=SensorDeviceClass.CURRENT,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
             entity_category=EntityCategory.DIAGNOSTIC,
             entity_registry_enabled_default=False,
         ),
@@ -1113,6 +1146,7 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
             translation_key="voltage",
             device_class=SensorDeviceClass.VOLTAGE,
             state_class=SensorStateClass.MEASUREMENT,
+            suggested_unit_of_measurement=UnitOfElectricPotential.VOLT,
             entity_category=EntityCategory.DIAGNOSTIC,
             entity_registry_enabled_default=False,
         ),
@@ -1415,20 +1449,18 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
             return
 
         uoms = DEVICE_CLASS_UNITS[self.device_class]
-        self._uom = uoms.get(self.native_unit_of_measurement) or uoms.get(
+        uom = uoms.get(self.native_unit_of_measurement) or uoms.get(
             self.native_unit_of_measurement.lower()
         )
 
         # Unknown unit of measurement, device class should not be used.
-        if self._uom is None:
+        if uom is None:
             self._attr_device_class = None
             return
 
         # Found unit of measurement, use the standardized Unit
         # Use the target conversion unit (if set)
-        self._attr_native_unit_of_measurement = (
-            self._uom.conversion_unit or self._uom.unit
-        )
+        self._attr_native_unit_of_measurement = uom.unit
 
     @property
     def native_value(self) -> StateType:
@@ -1450,10 +1482,7 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
 
         # Scale integer/float value
         if isinstance(self._type_data, IntegerTypeData):
-            scaled_value = self._type_data.scale_value(value)
-            if self._uom and self._uom.conversion_fn is not None:
-                return self._uom.conversion_fn(scaled_value)
-            return scaled_value
+            return self._type_data.scale_value(value)
 
         # Unexpected enum value
         if (
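The added suggested_unit_of_measurement values take over from the removed Tuya-side conversions: each entity keeps reporting the raw device unit as its native unit, and core converts the state for display and statistics. A hypothetical minimal sensor (not from this diff) showing the same pattern:

from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.const import UnitOfElectricCurrent


class ExampleCurrentSensor(SensorEntity):
    """Reports mA natively; shown in A because of the suggested unit."""

    _attr_device_class = SensorDeviceClass.CURRENT
    _attr_native_unit_of_measurement = UnitOfElectricCurrent.MILLIAMPERE
    _attr_suggested_unit_of_measurement = UnitOfElectricCurrent.AMPERE

    @property
    def native_value(self) -> float:
        return 1500.0  # raw device value in mA; displayed as 1.5 A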
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import Any
 
@@ -38,6 +39,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
         vol.Optional(CONF_API_KEY, default=""): str,
     }
 )
+STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_API_KEY, default=""): str})
 
 
 class UptimeKumaConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -77,3 +79,48 @@ class UptimeKumaConfigFlow(ConfigFlow, domain=DOMAIN):
             ),
             errors=errors,
         )
+
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Perform reauth upon an API authentication error."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Confirm reauthentication dialog."""
+        errors: dict[str, str] = {}
+
+        entry = self._get_reauth_entry()
+
+        if user_input is not None:
+            session = async_get_clientsession(self.hass, entry.data[CONF_VERIFY_SSL])
+            uptime_kuma = UptimeKuma(
+                session,
+                entry.data[CONF_URL],
+                user_input[CONF_API_KEY],
+            )
+
+            try:
+                await uptime_kuma.metrics()
+            except UptimeKumaAuthenticationException:
+                errors["base"] = "invalid_auth"
+            except UptimeKumaException:
+                errors["base"] = "cannot_connect"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                return self.async_update_reload_and_abort(
+                    entry,
+                    data_updates=user_input,
+                )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=self.add_suggested_values_to_schema(
+                data_schema=STEP_REAUTH_DATA_SCHEMA, suggested_values=user_input
+            ),
+            errors=errors,
+        )
@@ -16,7 +16,7 @@ from pythonkuma import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import ConfigEntryError
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -59,7 +59,7 @@ class UptimeKumaDataUpdateCoordinator(
         try:
             metrics = await self.api.metrics()
         except UptimeKumaAuthenticationException as e:
-            raise ConfigEntryError(
+            raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="auth_failed_exception",
             ) from e
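The switch from ConfigEntryError to ConfigEntryAuthFailed is what wires the coordinator to the reauth steps added in the config flow above: when the coordinator raises it, core marks the entry as needing reauthentication and starts the flow. A generic, hedged sketch of that pattern (names are illustrative, not taken from this diff):

from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class ExampleAuthError(Exception):
    """Stand-in for a client library's authentication error."""


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Minimal coordinator showing the reauth trigger."""

    async def _async_update_data(self) -> dict:
        try:
            return await self._fetch()  # hypothetical library call
        except ExampleAuthError as err:
            # Core flags the entry and launches async_step_reauth for the user.
            raise ConfigEntryAuthFailed("Authentication failed") from err

    async def _fetch(self) -> dict:
        raise ExampleAuthError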
23 homeassistant/components/uptime_kuma/diagnostics.py Normal file
@@ -0,0 +1,23 @@
+"""Diagnostics platform for Uptime Kuma."""
+
+from __future__ import annotations
+
+from dataclasses import asdict
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.core import HomeAssistant
+
+from .coordinator import UptimeKumaConfigEntry
+
+TO_REDACT = {"monitor_url", "monitor_hostname"}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: UptimeKumaConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+
+    return async_redact_data(
+        {k: asdict(v) for k, v in entry.runtime_data.data.items()}, TO_REDACT
+    )
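For reference (illustration only, values made up): async_redact_data replaces the values of the listed keys recursively, so monitor URLs and hostnames never end up in a downloaded diagnostics dump.

from homeassistant.components.diagnostics import async_redact_data

TO_REDACT = {"monitor_url", "monitor_hostname"}
sample = {"1": {"monitor_name": "My site", "monitor_url": "https://example.org"}}
print(async_redact_data(sample, TO_REDACT))
# {'1': {'monitor_name': 'My site', 'monitor_url': '**REDACTED**'}}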
@@ -7,5 +7,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["pythonkuma"],
   "quality_scale": "bronze",
-  "requirements": ["pythonkuma==0.3.0"]
+  "requirements": ["pythonkuma==0.3.1"]
 }
@@ -38,12 +38,12 @@ rules:
   integration-owner: done
   log-when-unavailable: done
   parallel-updates: done
-  reauthentication-flow: todo
+  reauthentication-flow: done
   test-coverage: done
 
   # Gold
   devices: done
-  diagnostics: todo
+  diagnostics: done
   discovery-update-info:
     status: exempt
     comment: is not locally discoverable
@@ -13,6 +13,16 @@
           "verify_ssl": "Enable SSL certificate verification for secure connections. Disable only if connecting to an Uptime Kuma instance using a self-signed certificate or via IP address",
           "api_key": "Enter an API key. To create a new API key navigate to **Settings → API Keys** and select **Add API Key**"
         }
+      },
+      "reauth_confirm": {
+        "title": "Re-authenticate with Uptime Kuma: {name}",
+        "description": "The API key for **{name}** is invalid. To re-authenticate with Uptime Kuma provide a new API key below",
+        "data": {
+          "api_key": "[%key:common::config_flow::data::api_key%]"
+        },
+        "data_description": {
+          "api_key": "[%key:component::uptime_kuma::config::step::user::data_description::api_key%]"
+        }
       }
     },
     "error": {
@@ -21,7 +31,8 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
   "entity": {
@@ -37,6 +37,7 @@ _LOGGER = logging.getLogger(__name__)
 
 PLATFORMS = [
     Platform.BINARY_SENSOR,
+    Platform.FAN,
     Platform.LIGHT,
     Platform.NUMBER,
     Platform.SENSOR,
139 homeassistant/components/wiz/fan.py Normal file
@@ -0,0 +1,139 @@
+"""WiZ integration fan platform."""
+
+from __future__ import annotations
+
+import math
+from typing import Any, ClassVar
+
+from pywizlight.bulblibrary import BulbType, Features
+
+from homeassistant.components.fan import (
+    DIRECTION_FORWARD,
+    DIRECTION_REVERSE,
+    FanEntity,
+    FanEntityFeature,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity import ToggleEntity
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.util.percentage import (
+    percentage_to_ranged_value,
+    ranged_value_to_percentage,
+)
+
+from . import WizConfigEntry
+from .entity import WizEntity
+from .models import WizData
+
+PRESET_MODE_BREEZE = "breeze"
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: WizConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up the WiZ Platform from config_flow."""
+    if entry.runtime_data.bulb.bulbtype.features.fan:
+        async_add_entities([WizFanEntity(entry.runtime_data, entry.title)])
+
+
+class WizFanEntity(WizEntity, FanEntity):
+    """Representation of WiZ Light bulb."""
+
+    _attr_name = None
+
+    # We want the implementation of is_on to be the same as in ToggleEntity,
+    # but it is being overridden in FanEntity, so we need to restore it here.
+    is_on: ClassVar = ToggleEntity.is_on
+
+    def __init__(self, wiz_data: WizData, name: str) -> None:
+        """Initialize a WiZ fan."""
+        super().__init__(wiz_data, name)
+        bulb_type: BulbType = self._device.bulbtype
+        features: Features = bulb_type.features
+
+        supported_features = (
+            FanEntityFeature.TURN_ON
+            | FanEntityFeature.TURN_OFF
+            | FanEntityFeature.SET_SPEED
+        )
+        if features.fan_reverse:
+            supported_features |= FanEntityFeature.DIRECTION
+        if features.fan_breeze_mode:
+            supported_features |= FanEntityFeature.PRESET_MODE
+            self._attr_preset_modes = [PRESET_MODE_BREEZE]
+
+        self._attr_supported_features = supported_features
+        self._attr_speed_count = bulb_type.fan_speed_range
+
+        self._async_update_attrs()
+
+    @callback
+    def _async_update_attrs(self) -> None:
+        """Handle updating _attr values."""
+        state = self._device.state
+
+        self._attr_is_on = state.get_fan_state() > 0
+        self._attr_percentage = ranged_value_to_percentage(
+            (1, self.speed_count), state.get_fan_speed()
+        )
+        if FanEntityFeature.PRESET_MODE in self.supported_features:
+            fan_mode = state.get_fan_mode()
+            self._attr_preset_mode = PRESET_MODE_BREEZE if fan_mode == 2 else None
+        if FanEntityFeature.DIRECTION in self.supported_features:
+            fan_reverse = state.get_fan_reverse()
+            self._attr_current_direction = None
+            if fan_reverse == 0:
+                self._attr_current_direction = DIRECTION_FORWARD
+            elif fan_reverse == 1:
+                self._attr_current_direction = DIRECTION_REVERSE
+
+    async def async_set_preset_mode(self, preset_mode: str) -> None:
+        """Set the preset mode of the fan."""
+        # preset_mode == PRESET_MODE_BREEZE
+        await self._device.fan_set_state(mode=2)
+        await self.coordinator.async_request_refresh()
+
+    async def async_set_percentage(self, percentage: int) -> None:
+        """Set the speed percentage of the fan."""
+        if percentage == 0:
+            await self.async_turn_off()
+            return
+
+        speed = math.ceil(percentage_to_ranged_value((1, self.speed_count), percentage))
+        await self._device.fan_set_state(mode=1, speed=speed)
+        await self.coordinator.async_request_refresh()
+
+    async def async_turn_on(
+        self,
+        percentage: int | None = None,
+        preset_mode: str | None = None,
+        **kwargs: Any,
+    ) -> None:
+        """Turn on the fan."""
+        mode: int | None = None
+        speed: int | None = None
+        if preset_mode is not None:
+            self._valid_preset_mode_or_raise(preset_mode)
+            if preset_mode == PRESET_MODE_BREEZE:
+                mode = 2
+        if percentage is not None:
+            speed = math.ceil(
+                percentage_to_ranged_value((1, self.speed_count), percentage)
+            )
+            if mode is None:
+                mode = 1
+        await self._device.fan_turn_on(mode=mode, speed=speed)
+        await self.coordinator.async_request_refresh()
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn off the fan."""
+        await self._device.fan_turn_off(**kwargs)
+        await self.coordinator.async_request_refresh()
+
+    async def async_set_direction(self, direction: str) -> None:
+        """Set the direction of the fan."""
+        reverse = 1 if direction == DIRECTION_REVERSE else 0
+        await self._device.fan_set_state(reverse=reverse)
+        await self.coordinator.async_request_refresh()
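A quick sanity check (illustration only) of the percentage-to-speed mapping used above, for a bulb whose fan_speed_range reports 6 discrete speeds:

import math

from homeassistant.util.percentage import (
    percentage_to_ranged_value,
    ranged_value_to_percentage,
)

speed_count = 6
print(math.ceil(percentage_to_ranged_value((1, speed_count), 50)))  # 3
print(ranged_value_to_percentage((1, speed_count), 3))  # 50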
@@ -1,7 +1,7 @@
 {
   "domain": "wiz",
   "name": "WiZ",
-  "codeowners": ["@sbidy"],
+  "codeowners": ["@sbidy", "@arturpragacz"],
   "config_flow": true,
   "dependencies": ["network"],
   "dhcp": [
@@ -165,6 +165,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     hass.data[DOMAIN][entry.entry_id] = YoLinkHomeStore(
         yolink_home, device_coordinators
     )
+
+    # Clean up yolink devices which are not associated to the account anymore.
+    device_registry = dr.async_get(hass)
+    device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
+    for device_entry in device_entries:
+        for identifier in device_entry.identifiers:
+            if (
+                identifier[0] == DOMAIN
+                and device_coordinators.get(identifier[1]) is None
+            ):
+                device_registry.async_update_device(
+                    device_entry.id, remove_config_entry_id=entry.entry_id
+                )
+
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
     async def async_yolink_unload(event) -> None:
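Illustration of the stale-device check above (plain Python, values made up): a registry device keeps identifiers of the form (DOMAIN, device_id); any yolink identifier whose device_id no longer has an active coordinator is treated as stale and has this config entry removed from the device.

DOMAIN = "yolink"
device_coordinators = {"abc123": object()}  # devices still reported by the account
identifiers = {("yolink", "zzz999")}  # identifiers stored on a registry device

stale = any(
    domain == DOMAIN and device_coordinators.get(device_id) is None
    for domain, device_id in identifiers
)
print(stale)  # True -> remove_config_entry_id would be applied to this device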
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/youtube",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["youtubeaio==1.1.5"]
+  "requirements": ["youtubeaio==2.0.0"]
 }
@@ -117,11 +117,8 @@ def _validate_supported_feature(supported_feature: str) -> int:
         raise vol.Invalid(f"Unknown supported feature '{supported_feature}'") from exc
 
 
-def _validate_supported_features(supported_features: int | list[str]) -> int:
-    """Validate a supported feature and resolve an enum string to its value."""
-
-    if isinstance(supported_features, int):
-        return supported_features
+def _validate_supported_features(supported_features: list[str]) -> int:
+    """Validate supported features and resolve enum strings to their value."""
 
     feature_mask = 0
 
@@ -160,6 +157,22 @@ ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA = vol.Schema(
 )
 
 
+# Legacy entity selector config schema used directly under entity selectors
+# is provided for backwards compatibility and remains feature frozen.
+# New filtering features should be added under the `filter` key instead.
+# https://github.com/home-assistant/frontend/pull/15302
+LEGACY_ENTITY_SELECTOR_CONFIG_SCHEMA = vol.Schema(
+    {
+        # Integration that provided the entity
+        vol.Optional("integration"): str,
+        # Domain the entity belongs to
+        vol.Optional("domain"): vol.All(cv.ensure_list, [str]),
+        # Device class of the entity
+        vol.Optional("device_class"): vol.All(cv.ensure_list, [str]),
+    }
+)
+
+
 class EntityFilterSelectorConfig(TypedDict, total=False):
     """Class to represent a single entity selector config."""
 
@@ -179,10 +192,22 @@ DEVICE_FILTER_SELECTOR_CONFIG_SCHEMA = vol.Schema(
         vol.Optional("model"): str,
         # Model ID of device
         vol.Optional("model_id"): str,
-        # Device has to contain entities matching this selector
-        vol.Optional("entity"): vol.All(
-            cv.ensure_list, [ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA]
-        ),
+    }
+)
+
+
+# Legacy device selector config schema used directly under device selectors
+# is provided for backwards compatibility and remains feature frozen.
+# New filtering features should be added under the `filter` key instead.
+# https://github.com/home-assistant/frontend/pull/15302
+LEGACY_DEVICE_SELECTOR_CONFIG_SCHEMA = vol.Schema(
+    {
+        # Integration linked to it with a config entry
+        vol.Optional("integration"): str,
+        # Manufacturer of device
+        vol.Optional("manufacturer"): str,
+        # Model of device
+        vol.Optional("model"): str,
     }
 )
 
@@ -714,9 +739,13 @@ class DeviceSelector(Selector[DeviceSelectorConfig]):
     selector_type = "device"
 
     CONFIG_SCHEMA = BASE_SELECTOR_CONFIG_SCHEMA.extend(
-        DEVICE_FILTER_SELECTOR_CONFIG_SCHEMA.schema
+        LEGACY_DEVICE_SELECTOR_CONFIG_SCHEMA.schema
     ).extend(
         {
+            # Device has to contain entities matching this selector
+            vol.Optional("entity"): vol.All(
+                cv.ensure_list, [ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA]
+            ),
             vol.Optional("multiple", default=False): cv.boolean,
             vol.Optional("filter"): vol.All(
                 cv.ensure_list,
@@ -794,7 +823,7 @@ class EntitySelector(Selector[EntitySelectorConfig]):
     selector_type = "entity"
 
     CONFIG_SCHEMA = BASE_SELECTOR_CONFIG_SCHEMA.extend(
-        ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA.schema
+        LEGACY_ENTITY_SELECTOR_CONFIG_SCHEMA.schema
     ).extend(
         {
             vol.Optional("exclude_entities"): [str],
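Assumed usage sketch (not from the diff): existing configs keep validating with the legacy top-level keys, while new configs are expected to express the same constraints under the filter block; both go through the same selector factory.

from homeassistant.helpers import selector

legacy = selector.selector({"device": {"integration": "zha"}})
modern = selector.selector({"device": {"filter": [{"integration": "zha"}]}})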
@ -45,7 +45,7 @@ ifaddr==0.2.0
|
|||||||
Jinja2==3.1.6
|
Jinja2==3.1.6
|
||||||
lru-dict==1.3.0
|
lru-dict==1.3.0
|
||||||
mutagen==1.47.0
|
mutagen==1.47.0
|
||||||
orjson==3.10.18
|
orjson==3.11.0
|
||||||
packaging>=23.1
|
packaging>=23.1
|
||||||
paho-mqtt==2.1.0
|
paho-mqtt==2.1.0
|
||||||
Pillow==11.3.0
|
Pillow==11.3.0
|
||||||
|
@ -61,7 +61,7 @@ dependencies = [
|
|||||||
"Pillow==11.3.0",
|
"Pillow==11.3.0",
|
||||||
"propcache==0.3.2",
|
"propcache==0.3.2",
|
||||||
"pyOpenSSL==25.1.0",
|
"pyOpenSSL==25.1.0",
|
||||||
"orjson==3.10.18",
|
"orjson==3.11.0",
|
||||||
"packaging>=23.1",
|
"packaging>=23.1",
|
||||||
"psutil-home-assistant==0.0.1",
|
"psutil-home-assistant==0.0.1",
|
||||||
"python-slugify==8.0.4",
|
"python-slugify==8.0.4",
|
||||||
|
2
requirements.txt
generated
2
requirements.txt
generated
@ -33,7 +33,7 @@ cryptography==45.0.3
|
|||||||
Pillow==11.3.0
|
Pillow==11.3.0
|
||||||
propcache==0.3.2
|
propcache==0.3.2
|
||||||
pyOpenSSL==25.1.0
|
pyOpenSSL==25.1.0
|
||||||
orjson==3.10.18
|
orjson==3.11.0
|
||||||
packaging>=23.1
|
packaging>=23.1
|
||||||
psutil-home-assistant==0.0.1
|
psutil-home-assistant==0.0.1
|
||||||
python-slugify==8.0.4
|
python-slugify==8.0.4
|
||||||
|
18
requirements_all.txt
generated
18
requirements_all.txt
generated
@ -179,7 +179,7 @@ aioacaia==0.1.14
|
|||||||
aioairq==0.4.6
|
aioairq==0.4.6
|
||||||
|
|
||||||
# homeassistant.components.airzone_cloud
|
# homeassistant.components.airzone_cloud
|
||||||
aioairzone-cloud==0.6.12
|
aioairzone-cloud==0.6.13
|
||||||
|
|
||||||
# homeassistant.components.airzone
|
# homeassistant.components.airzone
|
||||||
aioairzone==1.0.0
|
aioairzone==1.0.0
|
||||||
@ -471,7 +471,7 @@ altruistclient==0.1.1
|
|||||||
amberelectric==2.0.12
|
amberelectric==2.0.12
|
||||||
|
|
||||||
# homeassistant.components.amcrest
|
# homeassistant.components.amcrest
|
||||||
amcrest==1.9.8
|
amcrest==1.9.9
|
||||||
|
|
||||||
# homeassistant.components.androidtv
|
# homeassistant.components.androidtv
|
||||||
androidtv[async]==0.0.75
|
androidtv[async]==0.0.75
|
||||||
@ -845,7 +845,7 @@ eheimdigital==1.3.0
|
|||||||
electrickiwi-api==0.9.14
|
electrickiwi-api==0.9.14
|
||||||
|
|
||||||
# homeassistant.components.elevenlabs
|
# homeassistant.components.elevenlabs
|
||||||
elevenlabs==1.9.0
|
elevenlabs==2.3.0
|
||||||
|
|
||||||
# homeassistant.components.elgato
|
# homeassistant.components.elgato
|
||||||
elgato==5.1.2
|
elgato==5.1.2
|
||||||
@ -1174,7 +1174,7 @@ home-assistant-frontend==20250702.2
|
|||||||
home-assistant-intents==2025.6.23
|
home-assistant-intents==2025.6.23
|
||||||
|
|
||||||
# homeassistant.components.homematicip_cloud
|
# homeassistant.components.homematicip_cloud
|
||||||
homematicip==2.0.6
|
homematicip==2.0.7
|
||||||
|
|
||||||
# homeassistant.components.horizon
|
# homeassistant.components.horizon
|
||||||
horimote==0.4.1
|
horimote==0.4.1
|
||||||
@ -1234,7 +1234,7 @@ ihcsdk==2.8.5
|
|||||||
imeon_inverter_api==0.3.12
|
imeon_inverter_api==0.3.12
|
||||||
|
|
||||||
# homeassistant.components.imgw_pib
|
# homeassistant.components.imgw_pib
|
||||||
imgw_pib==1.2.0
|
imgw_pib==1.4.0
|
||||||
|
|
||||||
# homeassistant.components.incomfort
|
# homeassistant.components.incomfort
|
||||||
incomfort-client==0.6.9
|
incomfort-client==0.6.9
|
||||||
@ -2526,7 +2526,7 @@ python-vlc==3.0.18122
|
|||||||
pythonegardia==1.0.52
|
pythonegardia==1.0.52
|
||||||
|
|
||||||
# homeassistant.components.uptime_kuma
|
# homeassistant.components.uptime_kuma
|
||||||
pythonkuma==0.3.0
|
pythonkuma==0.3.1
|
||||||
|
|
||||||
# homeassistant.components.tile
|
# homeassistant.components.tile
|
||||||
pytile==2024.12.0
|
pytile==2024.12.0
|
||||||
@ -2538,7 +2538,7 @@ pytomorrowio==0.3.6
|
|||||||
pytouchline_extended==0.4.5
|
pytouchline_extended==0.4.5
|
||||||
|
|
||||||
# homeassistant.components.touchline_sl
|
# homeassistant.components.touchline_sl
|
||||||
pytouchlinesl==0.3.0
|
pytouchlinesl==0.4.0
|
||||||
|
|
||||||
# homeassistant.components.traccar
|
# homeassistant.components.traccar
|
||||||
# homeassistant.components.traccar_server
|
# homeassistant.components.traccar_server
|
||||||
@ -2907,7 +2907,7 @@ temperusb==1.6.1
|
|||||||
# homeassistant.components.tesla_fleet
|
# homeassistant.components.tesla_fleet
|
||||||
# homeassistant.components.teslemetry
|
# homeassistant.components.teslemetry
|
||||||
# homeassistant.components.tessie
|
# homeassistant.components.tessie
|
||||||
tesla-fleet-api==1.2.0
|
tesla-fleet-api==1.2.2
|
||||||
|
|
||||||
# homeassistant.components.powerwall
|
# homeassistant.components.powerwall
|
||||||
tesla-powerwall==0.5.2
|
tesla-powerwall==0.5.2
|
||||||
@ -3172,7 +3172,7 @@ yolink-api==0.5.7
|
|||||||
youless-api==2.2.0
|
youless-api==2.2.0
|
||||||
|
|
||||||
# homeassistant.components.youtube
|
# homeassistant.components.youtube
|
||||||
youtubeaio==1.1.5
|
youtubeaio==2.0.0
|
||||||
|
|
||||||
# homeassistant.components.media_extractor
|
# homeassistant.components.media_extractor
|
||||||
yt-dlp[default]==2025.06.09
|
yt-dlp[default]==2025.06.09
|
||||||
|
16 requirements_test_all.txt generated
@@ -167,7 +167,7 @@ aioacaia==0.1.14
 aioairq==0.4.6
 
 # homeassistant.components.airzone_cloud
-aioairzone-cloud==0.6.12
+aioairzone-cloud==0.6.13
 
 # homeassistant.components.airzone
 aioairzone==1.0.0
@@ -736,7 +736,7 @@ eheimdigital==1.3.0
 electrickiwi-api==0.9.14
 
 # homeassistant.components.elevenlabs
-elevenlabs==1.9.0
+elevenlabs==2.3.0
 
 # homeassistant.components.elgato
 elgato==5.1.2
@@ -1023,7 +1023,7 @@ home-assistant-frontend==20250702.2
 home-assistant-intents==2025.6.23
 
 # homeassistant.components.homematicip_cloud
-homematicip==2.0.6
+homematicip==2.0.7
 
 # homeassistant.components.remember_the_milk
 httplib2==0.20.4
@@ -1068,7 +1068,7 @@ igloohome-api==0.1.1
 imeon_inverter_api==0.3.12
 
 # homeassistant.components.imgw_pib
-imgw_pib==1.2.0
+imgw_pib==1.4.0
 
 # homeassistant.components.incomfort
 incomfort-client==0.6.9
@@ -2090,7 +2090,7 @@ python-technove==2.0.0
 python-telegram-bot[socks]==21.5
 
 # homeassistant.components.uptime_kuma
-pythonkuma==0.3.0
+pythonkuma==0.3.1
 
 # homeassistant.components.tile
 pytile==2024.12.0
@@ -2099,7 +2099,7 @@ pytile==2024.12.0
 pytomorrowio==0.3.6
 
 # homeassistant.components.touchline_sl
-pytouchlinesl==0.3.0
+pytouchlinesl==0.4.0
 
 # homeassistant.components.traccar
 # homeassistant.components.traccar_server
@@ -2393,7 +2393,7 @@ temperusb==1.6.1
 # homeassistant.components.tesla_fleet
 # homeassistant.components.teslemetry
 # homeassistant.components.tessie
-tesla-fleet-api==1.2.0
+tesla-fleet-api==1.2.2
 
 # homeassistant.components.powerwall
 tesla-powerwall==0.5.2
@@ -2619,7 +2619,7 @@ yolink-api==0.5.7
 youless-api==2.2.0
 
 # homeassistant.components.youtube
-youtubeaio==1.1.5
+youtubeaio==2.0.0
 
 # homeassistant.components.media_extractor
 yt-dlp[default]==2025.06.09
@@ -117,7 +117,6 @@ async def test_generate_data_service(
     for msg_attachment, attachment in zip(
         msg_attachments, task.attachments or [], strict=False
     ):
-        assert attachment.url == "http://example.com/media.mp4"
         assert attachment.mime_type == "video/mp4"
         assert attachment.media_content_id == msg_attachment["media_content_id"]
         assert attachment.path == Path("media.mp4")
@@ -1,18 +1,26 @@
 """Test tasks for the AI Task integration."""
 
+from datetime import timedelta
+from pathlib import Path
+from unittest.mock import patch
+
 from freezegun import freeze_time
 import pytest
 from syrupy.assertion import SnapshotAssertion
 
+from homeassistant.components import media_source
 from homeassistant.components.ai_task import AITaskEntityFeature, async_generate_data
+from homeassistant.components.camera import Image
 from homeassistant.components.conversation import async_get_chat_log
 from homeassistant.const import STATE_UNKNOWN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import chat_session
+from homeassistant.util import dt as dt_util
 
 from .conftest import TEST_ENTITY_ID, MockAITaskEntity
 
+from tests.common import async_fire_time_changed
 from tests.typing import WebSocketGenerator
 
 
@@ -154,3 +162,83 @@ async def test_generate_data_attachments_not_supported(
            }
        ],
    )
+
+
+async def test_generate_data_mixed_attachments(
+    hass: HomeAssistant,
+    init_components: None,
+    mock_ai_task_entity: MockAITaskEntity,
+) -> None:
+    """Test generating data with both camera and regular media source attachments."""
+    with (
+        patch(
+            "homeassistant.components.camera.async_get_image",
+            return_value=Image(content_type="image/jpeg", content=b"fake_camera_jpeg"),
+        ) as mock_get_image,
+        patch(
+            "homeassistant.components.media_source.async_resolve_media",
+            return_value=media_source.PlayMedia(
+                url="http://example.com/test.mp4",
+                mime_type="video/mp4",
+                path=Path("/media/test.mp4"),
+            ),
+        ) as mock_resolve_media,
+    ):
+        await async_generate_data(
+            hass,
+            task_name="Test Task",
+            entity_id=TEST_ENTITY_ID,
+            instructions="Analyze these files",
+            attachments=[
+                {
+                    "media_content_id": "media-source://camera/camera.front_door",
+                    "media_content_type": "image/jpeg",
+                },
+                {
+                    "media_content_id": "media-source://media_player/video.mp4",
+                    "media_content_type": "video/mp4",
+                },
+            ],
+        )
+
+    # Verify both methods were called
+    mock_get_image.assert_called_once_with(hass, "camera.front_door")
+    mock_resolve_media.assert_called_once_with(
+        hass, "media-source://media_player/video.mp4", None
+    )
+
+    # Check attachments
+    assert len(mock_ai_task_entity.mock_generate_data_tasks) == 1
+    task = mock_ai_task_entity.mock_generate_data_tasks[0]
+    assert task.attachments is not None
+    assert len(task.attachments) == 2
+
+    # Check camera attachment
+    camera_attachment = task.attachments[0]
+    assert (
+        camera_attachment.media_content_id == "media-source://camera/camera.front_door"
+    )
+    assert camera_attachment.mime_type == "image/jpeg"
+    assert isinstance(camera_attachment.path, Path)
+    assert camera_attachment.path.suffix == ".jpg"
+
+    # Verify camera snapshot content
+    assert camera_attachment.path.exists()
+    content = await hass.async_add_executor_job(camera_attachment.path.read_bytes)
+    assert content == b"fake_camera_jpeg"
+
+    # Trigger clean up
+    async_fire_time_changed(
+        hass,
+        dt_util.utcnow() + chat_session.CONVERSATION_TIMEOUT + timedelta(seconds=1),
+    )
+    await hass.async_block_till_done()
+
+    # Verify the temporary file cleaned up
+    assert not camera_attachment.path.exists()
+
+    # Check regular media attachment
+    media_attachment = task.attachments[1]
+    assert media_attachment.media_content_id == "media-source://media_player/video.mp4"
+    assert media_attachment.mime_type == "video/mp4"
+    assert media_attachment.path == Path("/media/test.mp4")
@@ -1 +1,13 @@
 """Tests for the amberelectric integration."""
+
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+
+
+async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
+    """Fixture for setting up the component."""
+    config_entry.add_to_hass(hass)
+
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
@@ -1,10 +1,59 @@
 """Provide common Amber fixtures."""
 
-from collections.abc import Generator
-from unittest.mock import AsyncMock, patch
+from collections.abc import AsyncGenerator, Generator
+from unittest.mock import AsyncMock, Mock, patch
 
+from amberelectric.models.interval import Interval
 import pytest
 
+from homeassistant.components.amberelectric.const import (
+    CONF_SITE_ID,
+    CONF_SITE_NAME,
+    DOMAIN,
+)
+from homeassistant.const import CONF_API_TOKEN
+
+from .helpers import (
+    CONTROLLED_LOAD_CHANNEL,
+    FEED_IN_CHANNEL,
+    FORECASTS,
+    GENERAL_AND_CONTROLLED_SITE_ID,
+    GENERAL_AND_FEED_IN_SITE_ID,
+    GENERAL_CHANNEL,
+    GENERAL_CHANNEL_WITH_RANGE,
+    GENERAL_FORECASTS,
+    GENERAL_ONLY_SITE_ID,
+)
+
+from tests.common import MockConfigEntry
+
+MOCK_API_TOKEN = "psk_0000000000000000"
+
+
+def create_amber_config_entry(
+    site_id: str, entry_id: str, name: str
+) -> MockConfigEntry:
+    """Create an Amber config entry."""
+    return MockConfigEntry(
+        domain=DOMAIN,
+        data={
+            CONF_API_TOKEN: MOCK_API_TOKEN,
+            CONF_SITE_NAME: name,
+            CONF_SITE_ID: site_id,
+        },
+        entry_id=entry_id,
+    )
+
+
+@pytest.fixture
+def mock_amber_client() -> Generator[AsyncMock]:
+    """Mock the Amber API client."""
+    with patch(
+        "homeassistant.components.amberelectric.amberelectric.AmberApi",
+        autospec=True,
+    ) as mock_client:
+        yield mock_client
+
 
 @pytest.fixture
 def mock_setup_entry() -> Generator[AsyncMock]:
@@ -13,3 +62,129 @@ def mock_setup_entry() -> Generator[AsyncMock]:
         "homeassistant.components.amberelectric.async_setup_entry", return_value=True
     ) as mock_setup_entry:
         yield mock_setup_entry
+
+
+@pytest.fixture
+async def general_channel_config_entry():
+    """Generate the default Amber config entry."""
+    return create_amber_config_entry(GENERAL_ONLY_SITE_ID, GENERAL_ONLY_SITE_ID, "home")
+
+
+@pytest.fixture
+async def general_channel_and_controlled_load_config_entry():
+    """Generate the default Amber config entry for site with controlled load."""
+    return create_amber_config_entry(
+        GENERAL_AND_CONTROLLED_SITE_ID, GENERAL_AND_CONTROLLED_SITE_ID, "home"
+    )
+
+
+@pytest.fixture
+async def general_channel_and_feed_in_config_entry():
+    """Generate the default Amber config entry for site with feed in."""
+    return create_amber_config_entry(
+        GENERAL_AND_FEED_IN_SITE_ID, GENERAL_AND_FEED_IN_SITE_ID, "home"
+    )
+
+
+@pytest.fixture
+def general_channel_prices() -> list[Interval]:
+    """List containing general channel prices."""
+    return GENERAL_CHANNEL
+
+
+@pytest.fixture
+def general_channel_prices_with_range() -> list[Interval]:
+    """List containing general channel prices."""
+    return GENERAL_CHANNEL_WITH_RANGE
+
+
+@pytest.fixture
+def controlled_load_channel_prices() -> list[Interval]:
+    """List containing controlled load channel prices."""
+    return CONTROLLED_LOAD_CHANNEL
+
+
+@pytest.fixture
+def feed_in_channel_prices() -> list[Interval]:
+    """List containing feed in channel prices."""
+    return FEED_IN_CHANNEL
+
+
+@pytest.fixture
+def forecast_prices() -> list[Interval]:
+    """List containing forecasts with advanced prices."""
+    return FORECASTS
+
+
+@pytest.fixture
+def general_forecast_prices() -> list[Interval]:
+    """List containing forecasts with advanced prices."""
+    return GENERAL_FORECASTS
+
+
+@pytest.fixture
+def mock_amber_client_general_channel(
+    mock_amber_client: AsyncMock, general_channel_prices: list[Interval]
+) -> Generator[AsyncMock]:
+    """Fake general channel prices."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = general_channel_prices
+    return mock_amber_client
+
+
+@pytest.fixture
+def mock_amber_client_general_channel_with_range(
+    mock_amber_client: AsyncMock, general_channel_prices_with_range: list[Interval]
+) -> Generator[AsyncMock]:
+    """Fake general channel prices with a range."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = general_channel_prices_with_range
+    return mock_amber_client
+
+
+@pytest.fixture
+def mock_amber_client_general_and_controlled_load(
+    mock_amber_client: AsyncMock,
+    general_channel_prices: list[Interval],
+    controlled_load_channel_prices: list[Interval],
+) -> Generator[AsyncMock]:
+    """Fake general channel and controlled load channel prices."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = (
+        general_channel_prices + controlled_load_channel_prices
+    )
+    return mock_amber_client
+
+
+@pytest.fixture
+async def mock_amber_client_general_and_feed_in(
+    mock_amber_client: AsyncMock,
+    general_channel_prices: list[Interval],
+    feed_in_channel_prices: list[Interval],
+) -> AsyncGenerator[Mock]:
+    """Set up general channel and feed in channel."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = (
+        general_channel_prices + feed_in_channel_prices
+    )
+    return mock_amber_client
+
+
+@pytest.fixture
+async def mock_amber_client_forecasts(
+    mock_amber_client: AsyncMock, forecast_prices: list[Interval]
+) -> AsyncGenerator[Mock]:
+    """Set up general channel, controlled load and feed in channel."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = forecast_prices
+    return mock_amber_client
+
+
+@pytest.fixture
+async def mock_amber_client_general_forecasts(
+    mock_amber_client: AsyncMock, general_forecast_prices: list[Interval]
+) -> AsyncGenerator[Mock]:
+    """Set up general channel only."""
+    client = mock_amber_client.return_value
+    client.get_current_prices.return_value = general_forecast_prices
+    return mock_amber_client
@@ -3,11 +3,13 @@
 from datetime import datetime, timedelta
 
 from amberelectric.models.actual_interval import ActualInterval
+from amberelectric.models.advanced_price import AdvancedPrice
 from amberelectric.models.channel import ChannelType
 from amberelectric.models.current_interval import CurrentInterval
 from amberelectric.models.forecast_interval import ForecastInterval
 from amberelectric.models.interval import Interval
 from amberelectric.models.price_descriptor import PriceDescriptor
+from amberelectric.models.range import Range
 from amberelectric.models.spike_status import SpikeStatus
 from dateutil import parser
 
@@ -15,12 +17,16 @@ from dateutil import parser
 def generate_actual_interval(channel_type: ChannelType, end_time: datetime) -> Interval:
     """Generate a mock actual interval."""
     start_time = end_time - timedelta(minutes=30)
+    if channel_type == ChannelType.CONTROLLEDLOAD:
+        per_kwh = 4.4
+    if channel_type == ChannelType.FEEDIN:
+        per_kwh = 1.1
     return Interval(
         ActualInterval(
             type="ActualInterval",
             duration=30,
             spot_per_kwh=1.0,
-            per_kwh=8.0,
+            per_kwh=per_kwh,
             date=start_time.date(),
             nem_time=end_time,
             start_time=start_time,
@@ -34,16 +40,23 @@ def generate_actual_interval(channel_type: ChannelType, end_time: datetime) -> I
 
 
 def generate_current_interval(
-    channel_type: ChannelType, end_time: datetime
+    channel_type: ChannelType,
+    end_time: datetime,
+    range=False,
 ) -> Interval:
     """Generate a mock current price."""
     start_time = end_time - timedelta(minutes=30)
-    return Interval(
+    per_kwh = 8.8
+    if channel_type == ChannelType.CONTROLLEDLOAD:
+        per_kwh = 4.4
+    if channel_type == ChannelType.FEEDIN:
+        per_kwh = 1.1
+    interval = Interval(
         CurrentInterval(
             type="CurrentInterval",
             duration=30,
             spot_per_kwh=1.0,
-            per_kwh=8.0,
+            per_kwh=per_kwh,
             date=start_time.date(),
             nem_time=end_time,
             start_time=start_time,
@@ -56,18 +69,28 @@ def generate_current_interval(
         )
     )
+
+    if range:
+        interval.actual_instance.range = Range(min=6.7, max=9.1)
+
+    return interval
 
 
 def generate_forecast_interval(
-    channel_type: ChannelType, end_time: datetime
+    channel_type: ChannelType, end_time: datetime, range=False, advanced_price=False
 ) -> Interval:
     """Generate a mock forecast interval."""
     start_time = end_time - timedelta(minutes=30)
-    return Interval(
+    per_kwh = 8.8
+    if channel_type == ChannelType.CONTROLLEDLOAD:
+        per_kwh = 4.4
+    if channel_type == ChannelType.FEEDIN:
+        per_kwh = 1.1
+    interval = Interval(
         ForecastInterval(
             type="ForecastInterval",
             duration=30,
             spot_per_kwh=1.1,
-            per_kwh=8.8,
+            per_kwh=per_kwh,
             date=start_time.date(),
             nem_time=end_time,
             start_time=start_time,
@@ -79,12 +102,20 @@ def generate_forecast_interval(
             estimate=True,
         )
     )
+    if range:
+        interval.actual_instance.range = Range(min=6.7, max=9.1)
+    if advanced_price:
+        interval.actual_instance.advanced_price = AdvancedPrice(
+            low=6.7, predicted=9.0, high=10.2
+        )
+    return interval
 
 
 GENERAL_ONLY_SITE_ID = "01FG2K6V5TB6X9W0EWPPMZD6MJ"
 GENERAL_AND_CONTROLLED_SITE_ID = "01FG2MC8RF7GBC4KJXP3YFZ162"
 GENERAL_AND_FEED_IN_SITE_ID = "01FG2MCD8KTRZR9MNNW84VP50S"
 GENERAL_AND_CONTROLLED_FEED_IN_SITE_ID = "01FG2MCD8KTRZR9MNNW847S50S"
+GENERAL_FOR_FAIL = "01JVCEYVSD5HGJG0KT7RNM91GG"
 
 GENERAL_CHANNEL = [
     generate_current_interval(
@@ -101,6 +132,21 @@ GENERAL_CHANNEL = [
     ),
 ]
+
+GENERAL_CHANNEL_WITH_RANGE = [
+    generate_current_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00"), range=True
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T09:00:00+10:00"), range=True
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T09:30:00+10:00"), range=True
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T10:00:00+10:00"), range=True
+    ),
+]
 
 CONTROLLED_LOAD_CHANNEL = [
     generate_current_interval(
         ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T08:30:00+10:00")
@@ -131,3 +177,93 @@ FEED_IN_CHANNEL = [
         ChannelType.FEEDIN, parser.parse("2021-09-21T10:00:00+10:00")
     ),
 ]
+
+GENERAL_FORECASTS = [
+    generate_current_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00")
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T09:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T09:30:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T10:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+]
+
+FORECASTS = [
+    generate_current_interval(
+        ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00")
+    ),
+    generate_current_interval(
+        ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T08:30:00+10:00")
+    ),
+    generate_current_interval(
+        ChannelType.FEEDIN, parser.parse("2021-09-21T08:30:00+10:00")
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T09:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T09:30:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.GENERAL,
+        parser.parse("2021-09-21T10:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.CONTROLLEDLOAD,
+        parser.parse("2021-09-21T09:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.CONTROLLEDLOAD,
+        parser.parse("2021-09-21T09:30:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.CONTROLLEDLOAD,
+        parser.parse("2021-09-21T10:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.FEEDIN,
+        parser.parse("2021-09-21T09:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.FEEDIN,
+        parser.parse("2021-09-21T09:30:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+    generate_forecast_interval(
+        ChannelType.FEEDIN,
+        parser.parse("2021-09-21T10:00:00+10:00"),
+        range=True,
+        advanced_price=True,
+    ),
+]
@@ -9,7 +9,6 @@ from unittest.mock import Mock, patch
 from amberelectric import ApiException
 from amberelectric.models.channel import Channel, ChannelType
 from amberelectric.models.interval import Interval
-from amberelectric.models.price_descriptor import PriceDescriptor
 from amberelectric.models.site import Site
 from amberelectric.models.site_status import SiteStatus
 from amberelectric.models.spike_status import SpikeStatus
@@ -17,10 +16,7 @@ from dateutil import parser
 import pytest
 
 from homeassistant.components.amberelectric.const import CONF_SITE_ID, CONF_SITE_NAME
-from homeassistant.components.amberelectric.coordinator import (
-    AmberUpdateCoordinator,
-    normalize_descriptor,
-)
+from homeassistant.components.amberelectric.coordinator import AmberUpdateCoordinator
 from homeassistant.const import CONF_API_TOKEN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import UpdateFailed
@@ -98,18 +94,6 @@ def mock_api_current_price() -> Generator:
     yield instance
 
 
-def test_normalize_descriptor() -> None:
-    """Test normalizing descriptors works correctly."""
-    assert normalize_descriptor(None) is None
-    assert normalize_descriptor(PriceDescriptor.NEGATIVE) == "negative"
-    assert normalize_descriptor(PriceDescriptor.EXTREMELYLOW) == "extremely_low"
-    assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
-    assert normalize_descriptor(PriceDescriptor.LOW) == "low"
-    assert normalize_descriptor(PriceDescriptor.NEUTRAL) == "neutral"
-    assert normalize_descriptor(PriceDescriptor.HIGH) == "high"
-    assert normalize_descriptor(PriceDescriptor.SPIKE) == "spike"
-
-
 async def test_fetch_general_site(hass: HomeAssistant, current_price_api: Mock) -> None:
     """Test fetching a site with only a general channel."""
 
@@ -120,7 +104,7 @@ async def test_fetch_general_site(hass: HomeAssistant, current_price_api: Mock)
     result = await data_service._async_update_data()
 
     current_price_api.get_current_prices.assert_called_with(
-        GENERAL_ONLY_SITE_ID, next=48
+        GENERAL_ONLY_SITE_ID, next=288
     )
 
     assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance
@@ -152,7 +136,7 @@ async def test_fetch_no_general_site(
         await data_service._async_update_data()
 
     current_price_api.get_current_prices.assert_called_with(
-        GENERAL_ONLY_SITE_ID, next=48
+        GENERAL_ONLY_SITE_ID, next=288
     )
 
 
@@ -166,7 +150,7 @@ async def test_fetch_api_error(hass: HomeAssistant, current_price_api: Mock) ->
     result = await data_service._async_update_data()
 
     current_price_api.get_current_prices.assert_called_with(
-        GENERAL_ONLY_SITE_ID, next=48
+        GENERAL_ONLY_SITE_ID, next=288
     )
 
     assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance
@@ -217,7 +201,7 @@ async def test_fetch_general_and_controlled_load_site(
     result = await data_service._async_update_data()
 
     current_price_api.get_current_prices.assert_called_with(
-        GENERAL_AND_CONTROLLED_SITE_ID, next=48
+        GENERAL_AND_CONTROLLED_SITE_ID, next=288
     )
 
     assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance
@@ -257,7 +241,7 @@ async def test_fetch_general_and_feed_in_site(
     result = await data_service._async_update_data()
 
     current_price_api.get_current_prices.assert_called_with(
-        GENERAL_AND_FEED_IN_SITE_ID, next=48
+        GENERAL_AND_FEED_IN_SITE_ID, next=288
     )
 
     assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance
17 tests/components/amberelectric/test_helpers.py Normal file
@@ -0,0 +1,17 @@
+"""Test formatters."""
+
+from amberelectric.models.price_descriptor import PriceDescriptor
+
+from homeassistant.components.amberelectric.helpers import normalize_descriptor
+
+
+def test_normalize_descriptor() -> None:
+    """Test normalizing descriptors works correctly."""
+    assert normalize_descriptor(None) is None
+    assert normalize_descriptor(PriceDescriptor.NEGATIVE) == "negative"
+    assert normalize_descriptor(PriceDescriptor.EXTREMELYLOW) == "extremely_low"
+    assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
+    assert normalize_descriptor(PriceDescriptor.LOW) == "low"
+    assert normalize_descriptor(PriceDescriptor.NEUTRAL) == "neutral"
+    assert normalize_descriptor(PriceDescriptor.HIGH) == "high"
+    assert normalize_descriptor(PriceDescriptor.SPIKE) == "spike"
@@ -1,119 +1,26 @@
 """Test the Amber Electric Sensors."""
 
-from collections.abc import AsyncGenerator
-from unittest.mock import Mock, patch
-
-from amberelectric.models.current_interval import CurrentInterval
-from amberelectric.models.interval import Interval
-from amberelectric.models.range import Range
 import pytest
 
-from homeassistant.components.amberelectric.const import (
-    CONF_SITE_ID,
-    CONF_SITE_NAME,
-    DOMAIN,
-)
-from homeassistant.const import CONF_API_TOKEN
 from homeassistant.core import HomeAssistant
-from homeassistant.setup import async_setup_component
 
-from .helpers import (
-    CONTROLLED_LOAD_CHANNEL,
-    FEED_IN_CHANNEL,
-    GENERAL_AND_CONTROLLED_SITE_ID,
-    GENERAL_AND_FEED_IN_SITE_ID,
-    GENERAL_CHANNEL,
-    GENERAL_ONLY_SITE_ID,
-)
-
-from tests.common import MockConfigEntry
-
-MOCK_API_TOKEN = "psk_0000000000000000"
-
-
-@pytest.fixture
-async def setup_general(hass: HomeAssistant) -> AsyncGenerator[Mock]:
-    """Set up general channel."""
-    MockConfigEntry(
-        domain="amberelectric",
-        data={
-            CONF_SITE_NAME: "mock_title",
-            CONF_API_TOKEN: MOCK_API_TOKEN,
-            CONF_SITE_ID: GENERAL_ONLY_SITE_ID,
-        },
-    ).add_to_hass(hass)
-
-    instance = Mock()
-    with patch(
-        "amberelectric.AmberApi",
-        return_value=instance,
-    ) as mock_update:
-        instance.get_current_prices = Mock(return_value=GENERAL_CHANNEL)
-        assert await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-        yield mock_update.return_value
-
-
-@pytest.fixture
-async def setup_general_and_controlled_load(
-    hass: HomeAssistant,
-) -> AsyncGenerator[Mock]:
-    """Set up general channel and controller load channel."""
-    MockConfigEntry(
-        domain="amberelectric",
-        data={
-            CONF_API_TOKEN: MOCK_API_TOKEN,
-            CONF_SITE_ID: GENERAL_AND_CONTROLLED_SITE_ID,
-        },
-    ).add_to_hass(hass)
-
-    instance = Mock()
-    with patch(
-        "amberelectric.AmberApi",
-        return_value=instance,
-    ) as mock_update:
-        instance.get_current_prices = Mock(
-            return_value=GENERAL_CHANNEL + CONTROLLED_LOAD_CHANNEL
-        )
-        assert await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-        yield mock_update.return_value
-
-
-@pytest.fixture
-async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock]:
-    """Set up general channel and feed in channel."""
-    MockConfigEntry(
-        domain="amberelectric",
-        data={
-            CONF_API_TOKEN: MOCK_API_TOKEN,
-            CONF_SITE_ID: GENERAL_AND_FEED_IN_SITE_ID,
-        },
-    ).add_to_hass(hass)
-
-    instance = Mock()
-    with patch(
-        "amberelectric.AmberApi",
-        return_value=instance,
-    ) as mock_update:
-        instance.get_current_prices = Mock(
-            return_value=GENERAL_CHANNEL + FEED_IN_CHANNEL
-        )
-        assert await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-        yield mock_update.return_value
-
-
-async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> None:
+from . import MockConfigEntry, setup_integration
+
+
+@pytest.mark.usefixtures("mock_amber_client_general_channel")
+async def test_general_price_sensor(
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
+) -> None:
     """Test the General Price sensor."""
+    await setup_integration(hass, general_channel_config_entry)
     assert len(hass.states.async_all()) == 6
     price = hass.states.get("sensor.mock_title_general_price")
     assert price
-    assert price.state == "0.08"
+    assert price.state == "0.09"
     attributes = price.attributes
     assert attributes["duration"] == 30
     assert attributes["date"] == "2021-09-21"
-    assert attributes["per_kwh"] == 0.08
+    assert attributes["per_kwh"] == 0.09
     assert attributes["nem_date"] == "2021-09-21T08:30:00+10:00"
     assert attributes["spot_per_kwh"] == 0.01
     assert attributes["start_time"] == "2021-09-21T08:00:00+10:00"
@@ -126,32 +33,36 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) ->
     assert attributes.get("range_min") is None
     assert attributes.get("range_max") is None
 
-    with_range: list[CurrentInterval] = GENERAL_CHANNEL
-    with_range[0].actual_instance.range = Range(min=7.8, max=12.4)
-
-    setup_general.get_current_price.return_value = with_range
-    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
-    await hass.config_entries.async_reload(config_entry.entry_id)
-    await hass.async_block_till_done()
 
+@pytest.mark.usefixtures("mock_amber_client_general_channel_with_range")
+async def test_general_price_sensor_with_range(
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
+) -> None:
+    """Test the General Price sensor with a range."""
+    await setup_integration(hass, general_channel_config_entry)
+    assert len(hass.states.async_all()) == 6
     price = hass.states.get("sensor.mock_title_general_price")
     assert price
     attributes = price.attributes
-    assert attributes.get("range_min") == 0.08
-    assert attributes.get("range_max") == 0.12
+    assert attributes.get("range_min") == 0.07
+    assert attributes.get("range_max") == 0.09
 
 
-@pytest.mark.usefixtures("setup_general_and_controlled_load")
-async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_and_controlled_load")
+async def test_general_and_controlled_load_price_sensor(
+    hass: HomeAssistant,
+    general_channel_and_controlled_load_config_entry: MockConfigEntry,
+) -> None:
     """Test the Controlled Price sensor."""
+    await setup_integration(hass, general_channel_and_controlled_load_config_entry)
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_controlled_load_price")
     assert price
-    assert price.state == "0.08"
+    assert price.state == "0.04"
     attributes = price.attributes
     assert attributes["duration"] == 30
     assert attributes["date"] == "2021-09-21"
-    assert attributes["per_kwh"] == 0.08
+    assert attributes["per_kwh"] == 0.04
     assert attributes["nem_date"] == "2021-09-21T08:30:00+10:00"
     assert attributes["spot_per_kwh"] == 0.01
     assert attributes["start_time"] == "2021-09-21T08:00:00+10:00"
@@ -163,17 +74,20 @@ async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) ->
     assert attributes["attribution"] == "Data provided by Amber Electric"
 
 
-@pytest.mark.usefixtures("setup_general_and_feed_in")
-async def test_general_and_feed_in_price_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_and_feed_in")
+async def test_general_and_feed_in_price_sensor(
+    hass: HomeAssistant, general_channel_and_feed_in_config_entry: MockConfigEntry
+) -> None:
     """Test the Feed In sensor."""
+    await setup_integration(hass, general_channel_and_feed_in_config_entry)
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_feed_in_price")
     assert price
-    assert price.state == "-0.08"
+    assert price.state == "-0.01"
     attributes = price.attributes
     assert attributes["duration"] == 30
     assert attributes["date"] == "2021-09-21"
-    assert attributes["per_kwh"] == -0.08
+    assert attributes["per_kwh"] == -0.01
     assert attributes["nem_date"] == "2021-09-21T08:30:00+10:00"
     assert attributes["spot_per_kwh"] == 0.01
     assert attributes["start_time"] == "2021-09-21T08:00:00+10:00"
@@ -185,10 +99,12 @@ async def test_general_and_feed_in_price_sensor(hass: HomeAssistant) -> None:
     assert attributes["attribution"] == "Data provided by Amber Electric"
 
 
+@pytest.mark.usefixtures("mock_amber_client_general_channel")
 async def test_general_forecast_sensor(
-    hass: HomeAssistant, setup_general: Mock
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
 ) -> None:
     """Test the General Forecast sensor."""
+    await setup_integration(hass, general_channel_config_entry)
     assert len(hass.states.async_all()) == 6
     price = hass.states.get("sensor.mock_title_general_forecast")
     assert price
@@ -212,29 +128,33 @@ async def test_general_forecast_sensor(
     assert first_forecast.get("range_min") is None
     assert first_forecast.get("range_max") is None
 
-    with_range: list[Interval] = GENERAL_CHANNEL
-    with_range[1].actual_instance.range = Range(min=7.8, max=12.4)
-
-    setup_general.get_current_price.return_value = with_range
-    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
-    await hass.config_entries.async_reload(config_entry.entry_id)
-    await hass.async_block_till_done()
 
+@pytest.mark.usefixtures("mock_amber_client_general_channel_with_range")
+async def test_general_forecast_sensor_with_range(
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
+) -> None:
+    """Test the General Forecast sensor with a range."""
+    await setup_integration(hass, general_channel_config_entry)
+    assert len(hass.states.async_all()) == 6
     price = hass.states.get("sensor.mock_title_general_forecast")
     assert price
     attributes = price.attributes
     first_forecast = attributes["forecasts"][0]
-    assert first_forecast.get("range_min") == 0.08
-    assert first_forecast.get("range_max") == 0.12
+    assert first_forecast.get("range_min") == 0.07
+    assert first_forecast.get("range_max") == 0.09
 
 
-@pytest.mark.usefixtures("setup_general_and_controlled_load")
-async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_and_controlled_load")
+async def test_controlled_load_forecast_sensor(
+    hass: HomeAssistant,
+    general_channel_and_controlled_load_config_entry: MockConfigEntry,
+) -> None:
     """Test the Controlled Load Forecast sensor."""
+    await setup_integration(hass, general_channel_and_controlled_load_config_entry)
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_controlled_load_forecast")
     assert price
-    assert price.state == "0.09"
+    assert price.state == "0.04"
     attributes = price.attributes
     assert attributes["channel_type"] == "controlledLoad"
     assert attributes["attribution"] == "Data provided by Amber Electric"
@@ -242,7 +162,7 @@ async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None:
     first_forecast = attributes["forecasts"][0]
     assert first_forecast["duration"] == 30
     assert first_forecast["date"] == "2021-09-21"
-    assert first_forecast["per_kwh"] == 0.09
+    assert first_forecast["per_kwh"] == 0.04
     assert first_forecast["nem_date"] == "2021-09-21T09:00:00+10:00"
     assert first_forecast["spot_per_kwh"] == 0.01
     assert first_forecast["start_time"] == "2021-09-21T08:30:00+10:00"
@@ -252,13 +172,16 @@ async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None:
     assert first_forecast["descriptor"] == "very_low"
 
 
-@pytest.mark.usefixtures("setup_general_and_feed_in")
-async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_and_feed_in")
+async def test_feed_in_forecast_sensor(
+    hass: HomeAssistant, general_channel_and_feed_in_config_entry: MockConfigEntry
+) -> None:
     """Test the Feed In Forecast sensor."""
+    await setup_integration(hass, general_channel_and_feed_in_config_entry)
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_feed_in_forecast")
     assert price
-    assert price.state == "-0.09"
+    assert price.state == "-0.01"
     attributes = price.attributes
     assert attributes["channel_type"] == "feedIn"
     assert attributes["attribution"] == "Data provided by Amber Electric"
@@ -266,7 +189,7 @@ async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None:
     first_forecast = attributes["forecasts"][0]
     assert first_forecast["duration"] == 30
     assert first_forecast["date"] == "2021-09-21"
-    assert first_forecast["per_kwh"] == -0.09
+    assert first_forecast["per_kwh"] == -0.01
     assert first_forecast["nem_date"] == "2021-09-21T09:00:00+10:00"
     assert first_forecast["spot_per_kwh"] == 0.01
     assert first_forecast["start_time"] == "2021-09-21T08:30:00+10:00"
@@ -276,38 +199,52 @@ async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None:
     assert first_forecast["descriptor"] == "very_low"
 
 
-@pytest.mark.usefixtures("setup_general")
-def test_renewable_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_channel")
+async def test_renewable_sensor(
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
+) -> None:
     """Testing the creation of the Amber renewables sensor."""
+    await setup_integration(hass, general_channel_config_entry)
 
     assert len(hass.states.async_all()) == 6
     sensor = hass.states.get("sensor.mock_title_renewables")
     assert sensor
     assert sensor.state == "51"
 
 
-@pytest.mark.usefixtures("setup_general")
-def test_general_price_descriptor_descriptor_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_channel")
+async def test_general_price_descriptor_descriptor_sensor(
+    hass: HomeAssistant, general_channel_config_entry: MockConfigEntry
+) -> None:
     """Test the General Price Descriptor sensor."""
+    await setup_integration(hass, general_channel_config_entry)
     assert len(hass.states.async_all()) == 6
     price = hass.states.get("sensor.mock_title_general_price_descriptor")
     assert price
     assert price.state == "extremely_low"
 
 
-@pytest.mark.usefixtures("setup_general_and_controlled_load")
-def test_general_and_controlled_load_price_descriptor_sensor(
+@pytest.mark.usefixtures("mock_amber_client_general_and_controlled_load")
+async def test_general_and_controlled_load_price_descriptor_sensor(
     hass: HomeAssistant,
+    general_channel_and_controlled_load_config_entry: MockConfigEntry,
 ) -> None:
     """Test the Controlled Price Descriptor sensor."""
+    await setup_integration(hass, general_channel_and_controlled_load_config_entry)
 
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_controlled_load_price_descriptor")
     assert price
     assert price.state == "extremely_low"
 
 
-@pytest.mark.usefixtures("setup_general_and_feed_in")
-def test_general_and_feed_in_price_descriptor_sensor(hass: HomeAssistant) -> None:
+@pytest.mark.usefixtures("mock_amber_client_general_and_feed_in")
+async def test_general_and_feed_in_price_descriptor_sensor(
+    hass: HomeAssistant, general_channel_and_feed_in_config_entry: MockConfigEntry
+) -> None:
     """Test the Feed In Price Descriptor sensor."""
+    await setup_integration(hass, general_channel_and_feed_in_config_entry)
 
     assert len(hass.states.async_all()) == 9
     price = hass.states.get("sensor.mock_title_feed_in_price_descriptor")
     assert price
tests/components/amberelectric/test_services.py (new file, 202 lines)
@ -0,0 +1,202 @@
"""Test the Amber Service object."""

import re

import pytest
import voluptuous as vol

from homeassistant.components.amberelectric.const import DOMAIN, SERVICE_GET_FORECASTS
from homeassistant.components.amberelectric.services import (
    ATTR_CHANNEL_TYPE,
    ATTR_CONFIG_ENTRY_ID,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError

from . import setup_integration
from .helpers import (
    GENERAL_AND_CONTROLLED_SITE_ID,
    GENERAL_AND_FEED_IN_SITE_ID,
    GENERAL_ONLY_SITE_ID,
)

from tests.common import MockConfigEntry


@pytest.mark.usefixtures("mock_amber_client_forecasts")
async def test_get_general_forecasts(
    hass: HomeAssistant,
    general_channel_config_entry: MockConfigEntry,
) -> None:
    """Test fetching general forecasts."""
    await setup_integration(hass, general_channel_config_entry)
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_FORECASTS,
        {ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID, ATTR_CHANNEL_TYPE: "general"},
        blocking=True,
        return_response=True,
    )
    assert len(result["forecasts"]) == 3

    first = result["forecasts"][0]
    assert first["duration"] == 30
    assert first["date"] == "2021-09-21"
    assert first["nem_date"] == "2021-09-21T09:00:00+10:00"
    assert first["per_kwh"] == 0.09
    assert first["spot_per_kwh"] == 0.01
    assert first["start_time"] == "2021-09-21T08:30:00+10:00"
    assert first["end_time"] == "2021-09-21T09:00:00+10:00"
    assert first["renewables"] == 50
    assert first["spike_status"] == "none"
    assert first["descriptor"] == "very_low"


@pytest.mark.usefixtures("mock_amber_client_forecasts")
async def test_get_controlled_load_forecasts(
    hass: HomeAssistant,
    general_channel_and_controlled_load_config_entry: MockConfigEntry,
) -> None:
    """Test fetching controlled load forecasts."""
    await setup_integration(hass, general_channel_and_controlled_load_config_entry)
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_FORECASTS,
        {
            ATTR_CONFIG_ENTRY_ID: GENERAL_AND_CONTROLLED_SITE_ID,
            ATTR_CHANNEL_TYPE: "controlled_load",
        },
        blocking=True,
        return_response=True,
    )
    assert len(result["forecasts"]) == 3

    first = result["forecasts"][0]
    assert first["duration"] == 30
    assert first["date"] == "2021-09-21"
    assert first["nem_date"] == "2021-09-21T09:00:00+10:00"
    assert first["per_kwh"] == 0.04
    assert first["spot_per_kwh"] == 0.01
    assert first["start_time"] == "2021-09-21T08:30:00+10:00"
    assert first["end_time"] == "2021-09-21T09:00:00+10:00"
    assert first["renewables"] == 50
    assert first["spike_status"] == "none"
    assert first["descriptor"] == "very_low"


@pytest.mark.usefixtures("mock_amber_client_forecasts")
async def test_get_feed_in_forecasts(
    hass: HomeAssistant,
    general_channel_and_feed_in_config_entry: MockConfigEntry,
) -> None:
    """Test fetching feed in forecasts."""
    await setup_integration(hass, general_channel_and_feed_in_config_entry)
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_FORECASTS,
        {
            ATTR_CONFIG_ENTRY_ID: GENERAL_AND_FEED_IN_SITE_ID,
            ATTR_CHANNEL_TYPE: "feed_in",
        },
        blocking=True,
        return_response=True,
    )
    assert len(result["forecasts"]) == 3

    first = result["forecasts"][0]
    assert first["duration"] == 30
    assert first["date"] == "2021-09-21"
    assert first["nem_date"] == "2021-09-21T09:00:00+10:00"
    assert first["per_kwh"] == -0.01
    assert first["spot_per_kwh"] == 0.01
    assert first["start_time"] == "2021-09-21T08:30:00+10:00"
    assert first["end_time"] == "2021-09-21T09:00:00+10:00"
    assert first["renewables"] == 50
    assert first["spike_status"] == "none"
    assert first["descriptor"] == "very_low"


@pytest.mark.usefixtures("mock_amber_client_forecasts")
async def test_incorrect_channel_type(
    hass: HomeAssistant,
    general_channel_config_entry: MockConfigEntry,
) -> None:
    """Test error when the channel type is incorrect."""
    await setup_integration(hass, general_channel_config_entry)

    with pytest.raises(
        vol.error.MultipleInvalid,
        match=re.escape(
            "value must be one of ['controlled_load', 'feed_in', 'general'] for dictionary value @ data['channel_type']"
        ),
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_GET_FORECASTS,
            {
                ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID,
                ATTR_CHANNEL_TYPE: "incorrect",
            },
            blocking=True,
            return_response=True,
        )


@pytest.mark.usefixtures("mock_amber_client_general_forecasts")
async def test_unavailable_channel_type(
    hass: HomeAssistant,
    general_channel_config_entry: MockConfigEntry,
) -> None:
    """Test error when the channel type is not found."""
    await setup_integration(hass, general_channel_config_entry)

    with pytest.raises(
        ServiceValidationError, match="There is no controlled_load channel at this site"
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_GET_FORECASTS,
            {
                ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID,
                ATTR_CHANNEL_TYPE: "controlled_load",
            },
            blocking=True,
            return_response=True,
        )


@pytest.mark.usefixtures("mock_amber_client_forecasts")
async def test_service_entry_availability(
    hass: HomeAssistant,
    general_channel_config_entry: MockConfigEntry,
) -> None:
    """Test the services without a valid entry."""
    general_channel_config_entry.add_to_hass(hass)
    mock_config_entry2 = MockConfigEntry(domain=DOMAIN)
    mock_config_entry2.add_to_hass(hass)
    await hass.config_entries.async_setup(general_channel_config_entry.entry_id)
    await hass.async_block_till_done()

    with pytest.raises(ServiceValidationError, match="Mock Title is not loaded"):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_GET_FORECASTS,
            {
                ATTR_CONFIG_ENTRY_ID: mock_config_entry2.entry_id,
                ATTR_CHANNEL_TYPE: "general",
            },
            blocking=True,
            return_response=True,
        )

    with pytest.raises(
        ServiceValidationError,
        match='Config entry "bad-config_id" not found in registry',
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_GET_FORECASTS,
            {ATTR_CONFIG_ENTRY_ID: "bad-config_id", ATTR_CHANNEL_TYPE: "general"},
            blocking=True,
            return_response=True,
        )
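test_incorrect_channel_type above expects voluptuous to reject an unknown channel type with the exact message in the match argument. A minimal sketch, assuming the service schema validates channel_type with vol.In (the attribute values are assumptions for illustration; the actual schema lives in the component's services.py and may differ):

import voluptuous as vol

ATTR_CONFIG_ENTRY_ID = "config_entry_id"  # assumed value, for illustration only
ATTR_CHANNEL_TYPE = "channel_type"  # matches the data['channel_type'] path in the test

GET_FORECASTS_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY_ID): str,
        vol.Required(ATTR_CHANNEL_TYPE): vol.In(
            ["controlled_load", "feed_in", "general"]
        ),
    }
)

# A value outside the list raises vol.MultipleInvalid whose message is
# "value must be one of ['controlled_load', 'feed_in', 'general']
#  for dictionary value @ data['channel_type']", which is what the test matches.
GET_FORECASTS_SCHEMA({ATTR_CONFIG_ENTRY_ID: "abc", ATTR_CHANNEL_TYPE: "general"})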
@ -28,7 +28,8 @@ def mock_setup_entry() -> Generator[AsyncMock]:
 def _client_mock():
     client_mock = AsyncMock()
     client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES)
-    client_mock.models.get_all.return_value = MOCK_MODELS
+    client_mock.models.list.return_value = MOCK_MODELS
+
     return client_mock
@ -44,6 +45,10 @@ def mock_async_client() -> Generator[AsyncMock]:
             "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
             new=mock_async_client,
         ),
+        patch(
+            "homeassistant.components.elevenlabs.tts.AsyncElevenLabs",
+            new=mock_async_client,
+        ),
     ):
         yield mock_async_client
@ -52,8 +57,12 @@ def mock_async_client_api_error() -> Generator[AsyncMock]:
 def mock_async_client_api_error() -> Generator[AsyncMock]:
     """Override async ElevenLabs client with ApiError side effect."""
     client_mock = _client_mock()
-    client_mock.models.get_all.side_effect = ApiError
-    client_mock.voices.get_all.side_effect = ApiError
+    api_error = ApiError()
+    api_error.body = {
+        "detail": {"status": "invalid_api_key", "message": "API key is invalid"}
+    }
+    client_mock.models.list.side_effect = api_error
+    client_mock.voices.get_all.side_effect = api_error

     with (
         patch(
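The hunk above switches the fixture from raising the bare ApiError class to raising an instance with a structured body, so code under test can tell error causes apart. A small illustrative sketch of how such a body can be inspected (an assumption for illustration, not the component's actual implementation):

def api_error_status(body: dict | None) -> str | None:
    """Illustrative: pull the status string out of an ApiError body shaped like the fixture's."""
    if not isinstance(body, dict):
        return None
    detail = body.get("detail")
    if not isinstance(detail, dict):
        return None
    return detail.get("status")


# With the fixture's body this returns "invalid_api_key";
# a missing or malformed body returns None.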
@ -68,11 +77,51 @@ def mock_async_client_api_error() -> Generator[AsyncMock]:
         yield mock_async_client


+@pytest.fixture
+def mock_async_client_voices_error() -> Generator[AsyncMock]:
+    """Override async ElevenLabs client with ApiError side effect."""
+    client_mock = _client_mock()
+    api_error = ApiError()
+    api_error.body = {
+        "detail": {
+            "status": "voices_unauthorized",
+            "message": "API is unauthorized for voices",
+        }
+    }
+    client_mock.voices.get_all.side_effect = api_error
+
+    with patch(
+        "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
+        return_value=client_mock,
+    ) as mock_async_client:
+        yield mock_async_client
+
+
+@pytest.fixture
+def mock_async_client_models_error() -> Generator[AsyncMock]:
+    """Override async ElevenLabs client with ApiError side effect."""
+    client_mock = _client_mock()
+    api_error = ApiError()
+    api_error.body = {
+        "detail": {
+            "status": "models_unauthorized",
+            "message": "API is unauthorized for models",
+        }
+    }
+    client_mock.models.list.side_effect = api_error
+
+    with patch(
+        "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
+        return_value=client_mock,
+    ) as mock_async_client:
+        yield mock_async_client
+
+
 @pytest.fixture
 def mock_async_client_connect_error() -> Generator[AsyncMock]:
     """Override async ElevenLabs client."""
     client_mock = _client_mock()
-    client_mock.models.get_all.side_effect = ConnectError("Unknown")
+    client_mock.models.list.side_effect = ConnectError("Unknown")
     client_mock.voices.get_all.side_effect = ConnectError("Unknown")
     with (
         patch(
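The two new fixtures are presumably consumed by config-flow tests elsewhere in this PR. A hedged sketch of how such a test might look; the data key, flow outcome, and error key here are assumptions for illustration, not the PR's actual tests:

import pytest

from homeassistant import config_entries
from homeassistant.core import HomeAssistant


@pytest.mark.usefixtures("mock_async_client_models_error")
async def test_user_flow_models_unauthorized(hass: HomeAssistant) -> None:
    """Illustrative: start the user flow and expect the form to come back with an error."""
    result = await hass.config_entries.flow.async_init(
        "elevenlabs", context={"source": config_entries.SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {"api_key": "invalid-key"}  # assumed field name
    )
    assert result["type"] == "form"
    assert result["errors"]  # the exact error key depends on the component's mapping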
Some files were not shown because too many files have changed in this diff.