Mirror of https://github.com/home-assistant/core.git, synced 2025-07-19 11:17:21 +00:00
Merge branch 'dev' of github.com:home-assistant/core into target_trigger
This commit is contained in: commit b9246c40ba
@@ -535,6 +535,7 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
CODEOWNERS (generated, 6 lines changed)
@@ -1658,6 +1658,8 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco
@@ -1756,8 +1758,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
@@ -13,7 +13,7 @@ from homeassistant.components.conversation import (
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
session: ChatSession,
task: GenDataTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_session(self.hass) as session,
async_get_chat_log(
self.hass,
session,
@@ -88,12 +88,13 @@ class AITaskEntity(RestoreEntity):
@final
async def internal_async_generate_data(
self,
session: ChatSession,
task: GenDataTask,
) -> GenDataTaskResult:
"""Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(task) as chat_log:
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
return await self._async_generate_data(task, chat_log)

async def _async_generate_data(
@@ -1,6 +1,7 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -3,17 +3,32 @@
from __future__ import annotations

from dataclasses import dataclass
import mimetypes
from pathlib import Path
import tempfile
from typing import Any

import voluptuous as vol

from homeassistant.components import conversation, media_source
from homeassistant.core import HomeAssistant
from homeassistant.components import camera, conversation, media_source
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import async_get_chat_session

from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)


async def async_generate_data(
hass: HomeAssistant,
*,
@@ -40,41 +55,79 @@ async def async_generate_data(
)

# Resolve attachments
resolved_attachments: list[conversation.Attachment] | None = None
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []

if attachments:
if AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features:
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)

for attachment in attachments or []:
media_content_id = attachment["media_content_id"]

# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")

# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)

temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)

resolved_attachments = []

for attachment in attachments:
media = await media_source.async_resolve_media(
hass, attachment["media_content_id"], None
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=attachment["media_content_id"],
url=media.url,
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)

return await entity.internal_async_generate_data(
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments,
with async_get_chat_session(hass) as session:
if created_files:

def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)

@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)

session.async_on_cleanup(cleanup_files_callback)

return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
)
)


@dataclass(slots=True)
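Note on the hunk above: camera attachments are addressed through the camera media-source prefix and are snapshotted to a temporary file before being handed to the entity. A minimal sketch of the attachments payload this code consumes (the entity ID and local media path are placeholders, not taken from the diff):

    # Shape of the attachments argument accepted by async_generate_data() above.
    attachments = [
        # Camera sources use the special media-source://camera/<entity_id> form and
        # are resolved via camera.async_get_image() plus _save_camera_snapshot().
        {"media_content_id": "media-source://camera/camera.front_door"},
        # Anything else goes through media_source.async_resolve_media() and must
        # resolve to a local path, otherwise HomeAssistantError is raised.
        {"media_content_id": "media-source://media_source/local/example.jpg"},
    ]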
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.12"]
"requirements": ["aioairzone-cloud==0.6.13"]
}
@@ -2,11 +2,22 @@

import amberelectric

from homeassistant.components.sensor import ConfigType
from homeassistant.const import CONF_API_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv

from .const import CONF_SITE_ID, PLATFORMS
from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
setup_services(hass)
return True


async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
@@ -1,14 +1,24 @@
"""Amber Electric Constants."""

import logging
from typing import Final

from homeassistant.const import Platform

DOMAIN = "amberelectric"
DOMAIN: Final = "amberelectric"
CONF_SITE_NAME = "site_name"
CONF_SITE_ID = "site_id"

ATTR_CONFIG_ENTRY_ID = "config_entry_id"
ATTR_CHANNEL_TYPE = "channel_type"

ATTRIBUTION = "Data provided by Amber Electric"

LOGGER = logging.getLogger(__package__)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]

SERVICE_GET_FORECASTS = "get_forecasts"

GENERAL_CHANNEL = "general"
CONTROLLED_LOAD_CHANNEL = "controlled_load"
FEED_IN_CHANNEL = "feed_in"
@@ -10,7 +10,6 @@ from amberelectric.models.actual_interval import ActualInterval
from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.models.price_descriptor import PriceDescriptor
from amberelectric.rest import ApiException

from homeassistant.config_entries import ConfigEntry
@@ -18,6 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import LOGGER
from .helpers import normalize_descriptor

type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]

@@ -49,27 +49,6 @@ def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) ->
return interval.channel_type == ChannelType.FEEDIN


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor is None:
return None
if descriptor.value == "spike":
return "spike"
if descriptor.value == "high":
return "high"
if descriptor.value == "neutral":
return "neutral"
if descriptor.value == "low":
return "low"
if descriptor.value == "veryLow":
return "very_low"
if descriptor.value == "extremelyLow":
return "extremely_low"
if descriptor.value == "negative":
return "negative"
return None


class AmberUpdateCoordinator(DataUpdateCoordinator):
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""

@@ -103,7 +82,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {},
}
try:
data = self._api.get_current_prices(self.site_id, next=48)
data = self._api.get_current_prices(self.site_id, next=288)
intervals = [interval.actual_instance for interval in data]
except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception
homeassistant/components/amberelectric/helpers.py (new file, 25 lines)
@@ -0,0 +1,25 @@
"""Formatting helpers used to convert things."""

from amberelectric.models.price_descriptor import PriceDescriptor

DESCRIPTOR_MAP: dict[str, str] = {
PriceDescriptor.SPIKE: "spike",
PriceDescriptor.HIGH: "high",
PriceDescriptor.NEUTRAL: "neutral",
PriceDescriptor.LOW: "low",
PriceDescriptor.VERYLOW: "very_low",
PriceDescriptor.EXTREMELYLOW: "extremely_low",
PriceDescriptor.NEGATIVE: "negative",
}


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor in DESCRIPTOR_MAP:
return DESCRIPTOR_MAP[descriptor]
return None


def format_cents_to_dollars(cents: float) -> float:
"""Return a formatted conversion from cents to dollars."""
return round(cents / 100, 2)
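The new helpers are small pure functions, so their behavior is easy to illustrate; a quick sketch with assumed values (not taken from the diff):

    from amberelectric.models.price_descriptor import PriceDescriptor

    from homeassistant.components.amberelectric.helpers import (
        format_cents_to_dollars,
        normalize_descriptor,
    )

    # Cents are converted to dollars and rounded to two decimals.
    assert format_cents_to_dollars(123.0) == 1.23
    # Known descriptors map to snake_case strings; None or unknown values yield None.
    assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
    assert normalize_descriptor(None) is None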
@@ -22,5 +22,10 @@
}
}
}
},
"services": {
"get_forecasts": {
"service": "mdi:transmission-tower"
}
}
}
@@ -23,16 +23,12 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ATTRIBUTION
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator, normalize_descriptor
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .helpers import format_cents_to_dollars, normalize_descriptor

UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"


def format_cents_to_dollars(cents: float) -> float:
"""Return a formatted conversion from cents to dollars."""
return round(cents / 100, 2)


def friendly_channel_type(channel_type: str) -> str:
"""Return a human readable version of the channel type."""
if channel_type == "controlled_load":
homeassistant/components/amberelectric/services.py (new file, 121 lines)
@@ -0,0 +1,121 @@
"""Amber Electric Service class."""

from amberelectric.models.channel import ChannelType
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import (
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.selector import ConfigEntrySelector
from homeassistant.util.json import JsonValueType

from .const import (
ATTR_CHANNEL_TYPE,
ATTR_CONFIG_ENTRY_ID,
CONTROLLED_LOAD_CHANNEL,
DOMAIN,
FEED_IN_CHANNEL,
GENERAL_CHANNEL,
SERVICE_GET_FORECASTS,
)
from .coordinator import AmberConfigEntry
from .helpers import format_cents_to_dollars, normalize_descriptor

GET_FORECASTS_SCHEMA = vol.Schema(
{
ATTR_CONFIG_ENTRY_ID: ConfigEntrySelector({"integration": DOMAIN}),
ATTR_CHANNEL_TYPE: vol.In(
[GENERAL_CHANNEL, CONTROLLED_LOAD_CHANNEL, FEED_IN_CHANNEL]
),
}
)


def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
"""Get the Amber config entry."""
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="integration_not_found",
translation_placeholders={"target": config_entry_id},
)
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_loaded",
translation_placeholders={"target": entry.title},
)
return entry


def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
"""Return an array of forecasts."""
results: list[JsonValueType] = []

if channel_type not in data["forecasts"]:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="channel_not_found",
translation_placeholders={"channel_type": channel_type},
)

intervals = data["forecasts"][channel_type]

for interval in intervals:
datum = {}
datum["duration"] = interval.duration
datum["date"] = interval.var_date.isoformat()
datum["nem_date"] = interval.nem_time.isoformat()
datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
if interval.channel_type == ChannelType.FEEDIN:
datum["per_kwh"] = datum["per_kwh"] * -1
datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
datum["start_time"] = interval.start_time.isoformat()
datum["end_time"] = interval.end_time.isoformat()
datum["renewables"] = round(interval.renewables)
datum["spike_status"] = interval.spike_status.value
datum["descriptor"] = normalize_descriptor(interval.descriptor)

if interval.range is not None:
datum["range_min"] = format_cents_to_dollars(interval.range.min)
datum["range_max"] = format_cents_to_dollars(interval.range.max)

if interval.advanced_price is not None:
multiplier = -1 if interval.channel_type == ChannelType.FEEDIN else 1
datum["advanced_price_low"] = multiplier * format_cents_to_dollars(
interval.advanced_price.low
)
datum["advanced_price_predicted"] = multiplier * format_cents_to_dollars(
interval.advanced_price.predicted
)
datum["advanced_price_high"] = multiplier * format_cents_to_dollars(
interval.advanced_price.high
)

results.append(datum)

return results


def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amber integration."""

async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
channel_type = call.data[ATTR_CHANNEL_TYPE]
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
coordinator = entry.runtime_data
forecasts = get_forecasts(channel_type, coordinator.data)
return {"forecasts": forecasts}

hass.services.async_register(
DOMAIN,
SERVICE_GET_FORECASTS,
handle_get_forecasts,
GET_FORECASTS_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
homeassistant/components/amberelectric/services.yaml (new file, 16 lines)
@@ -0,0 +1,16 @@
get_forecasts:
fields:
config_entry_id:
required: true
selector:
config_entry:
integration: amberelectric
channel_type:
required: true
selector:
select:
options:
- general
- controlled_load
- feed_in
translation_key: channel_type
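Together, services.py and services.yaml above add a response-only get_forecasts action. A minimal sketch of calling it from Python inside Home Assistant (the config entry ID is a placeholder; a YAML action call with the same two fields works the same way):

    # get_forecasts is registered with SupportsResponse.ONLY, so the caller
    # must request a response and use a blocking call.
    response = await hass.services.async_call(
        "amberelectric",
        "get_forecasts",
        {
            "config_entry_id": "0123456789abcdef0123456789abcdef",  # placeholder
            "channel_type": "general",  # or "controlled_load" / "feed_in"
        },
        blocking=True,
        return_response=True,
    )
    forecasts = response["forecasts"]  # list of per-interval price dictionaries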
@@ -1,25 +1,61 @@
{
"config": {
"error": {
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
"no_site": "No site provided",
"unknown_error": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"site": {
"data": {
"site_id": "Site NMI",
"site_name": "Site name"
},
"description": "Select the NMI of the site you would like to add"
},
"user": {
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]",
"site_id": "Site ID"
},
"description": "Go to {api_url} to generate an API key"
},
"site": {
"data": {
"site_id": "Site NMI",
"site_name": "Site Name"
},
"description": "Select the NMI of the site you would like to add"
}
}
},
"services": {
"get_forecasts": {
"name": "Get price forecasts",
"description": "Retrieves price forecasts from Amber Electric for a site.",
"fields": {
"config_entry_id": {
"description": "The config entry of the site to get forecasts for.",
"name": "Config entry"
},
"channel_type": {
"name": "Channel type",
"description": "The channel to get forecasts for."
}
}
}
},
"exceptions": {
"integration_not_found": {
"message": "Config entry \"{target}\" not found in registry."
},
"error": {
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
"no_site": "No site provided",
"unknown_error": "[%key:common::config_flow::error::unknown%]"
"not_loaded": {
"message": "{target} is not loaded."
},
"channel_not_found": {
"message": "There is no {channel_type} channel at this site."
}
},
"selector": {
"channel_type": {
"options": {
"general": "General",
"controlled_load": "Controlled load",
"feed_in": "Feed-in"
}
}
}
}
@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["amcrest"],
"quality_scale": "legacy",
"requirements": ["amcrest==1.9.8"]
"requirements": ["amcrest==1.9.9"]
}
@@ -12,6 +12,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNA
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import CONF_PASSKEY, DEFAULT_PORT, DOMAIN

@@ -21,12 +22,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

host: str
port: int
mac: str
passkey: str | None = None
username: str | None = None
password: str | None = None
def __init__(self) -> None:
"""Initialize BSBLan flow."""
self.host: str | None = None
self.port: int = DEFAULT_PORT
self.mac: str | None = None
self.passkey: str | None = None
self.username: str | None = None
self.password: str | None = None
self._auth_required = True

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -41,9 +45,111 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)

return await self._validate_and_create()

async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle Zeroconf discovery."""

self.host = str(discovery_info.ip_address)
self.port = discovery_info.port or DEFAULT_PORT

# Get MAC from properties
self.mac = discovery_info.properties.get("mac")

# If MAC was found in zeroconf, use it immediately
if self.mac:
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)
else:
# MAC not available from zeroconf - check for existing host/port first
self._async_abort_entries_match(
{CONF_HOST: self.host, CONF_PORT: self.port}
)

# Try to get device info without authentication to minimize discovery popup
config = BSBLANConfig(host=self.host, port=self.port)
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(config, session)
try:
device = await bsblan.device()
except BSBLANError:
# Device requires authentication - proceed to discovery confirm
self.mac = None
else:
self.mac = device.MAC

# Got MAC without auth - set unique ID and check for existing device
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)
# No auth needed, so we can proceed to a confirmation step without fields
self._auth_required = False

# Proceed to get credentials
self.context["title_placeholders"] = {"name": f"BSBLAN {self.host}"}
return await self.async_step_discovery_confirm()

async def async_step_discovery_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle getting credentials for discovered device."""
if user_input is None:
data_schema = vol.Schema(
{
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
)
if not self._auth_required:
data_schema = vol.Schema({})

return self.async_show_form(
step_id="discovery_confirm",
data_schema=data_schema,
description_placeholders={"host": str(self.host)},
)

if not self._auth_required:
return self._async_create_entry()

self.passkey = user_input.get(CONF_PASSKEY)
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)

return await self._validate_and_create(is_discovery=True)

async def _validate_and_create(
self, is_discovery: bool = False
) -> ConfigFlowResult:
"""Validate device connection and create entry."""
try:
await self._get_bsblan_info()
await self._get_bsblan_info(is_discovery=is_discovery)
except BSBLANError:
if is_discovery:
return self.async_show_form(
step_id="discovery_confirm",
data_schema=vol.Schema(
{
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
),
errors={"base": "cannot_connect"},
description_placeholders={"host": str(self.host)},
)
return self._show_setup_form({"base": "cannot_connect"})

return self._async_create_entry()
@@ -67,6 +173,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):

@callback
def _async_create_entry(self) -> ConfigFlowResult:
"""Create the config entry."""
return self.async_create_entry(
title=format_mac(self.mac),
data={
@@ -78,8 +185,10 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
},
)

async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None:
"""Get device information from an BSBLAN device."""
async def _get_bsblan_info(
self, raise_on_progress: bool = True, is_discovery: bool = False
) -> None:
"""Get device information from a BSBLAN device."""
config = BSBLANConfig(
host=self.host,
passkey=self.passkey,
@@ -90,11 +199,18 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(config, session)
device = await bsblan.device()
self.mac = device.MAC
retrieved_mac = device.MAC

await self.async_set_unique_id(
format_mac(self.mac), raise_on_progress=raise_on_progress
)
# Handle unique ID assignment based on whether MAC was available from zeroconf
if not self.mac:
# MAC wasn't available from zeroconf, now we have it from API
self.mac = retrieved_mac
await self.async_set_unique_id(
format_mac(self.mac), raise_on_progress=raise_on_progress
)

# Always allow updating host/port for both user and discovery flows
# This ensures connectivity is maintained when devices change IP addresses
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
@@ -7,5 +7,11 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==2.1.0"]
"requirements": ["python-bsblan==2.1.0"],
"zeroconf": [
{
"type": "_http._tcp.local.",
"name": "bsb-lan*"
}
]
}
@@ -20,6 +20,8 @@ from . import BSBLanConfigEntry, BSBLanData
from .coordinator import BSBLanCoordinatorData
from .entity import BSBLanEntity

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class BSBLanSensorEntityDescription(SensorEntityDescription):
@@ -13,7 +13,25 @@
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"host": "The hostname or IP address of your BSB-Lan device."
"host": "The hostname or IP address of your BSB-Lan device.",
"port": "The port number of your BSB-Lan device.",
"passkey": "The passkey for your BSB-Lan device.",
"username": "The username for your BSB-Lan device.",
"password": "The password for your BSB-Lan device."
}
},
"discovery_confirm": {
"title": "BSB-Lan device discovered",
"description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
"data": {
"passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]",
"password": "[%key:component::bsblan::config::step::user::data_description::password%]"
}
}
},
@@ -147,9 +147,6 @@ class Attachment:
media_content_id: str
"""Media content ID of the attachment."""

url: str
"""URL of the attachment."""

mime_type: str
"""MIME type of the attachment."""
@@ -25,7 +25,8 @@ PLATFORMS: list[Platform] = [Platform.TTS]

async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None:
"""Get ElevenLabs model from their API by the model_id."""
models = await client.models.get_all()
models = await client.models.list()

for maybe_model in models:
if maybe_model.model_id == model_id:
return maybe_model
@@ -23,14 +23,12 @@ from . import ElevenLabsConfigEntry
from .const import (
CONF_CONFIGURE_VOICE,
CONF_MODEL,
CONF_OPTIMIZE_LATENCY,
CONF_SIMILARITY,
CONF_STABILITY,
CONF_STYLE,
CONF_USE_SPEAKER_BOOST,
CONF_VOICE,
DEFAULT_MODEL,
DEFAULT_OPTIMIZE_LATENCY,
DEFAULT_SIMILARITY,
DEFAULT_STABILITY,
DEFAULT_STYLE,
@@ -51,7 +49,8 @@ async def get_voices_models(
httpx_client = get_async_client(hass)
client = AsyncElevenLabs(api_key=api_key, httpx_client=httpx_client)
voices = (await client.voices.get_all()).voices
models = await client.models.get_all()
models = await client.models.list()

voices_dict = {
voice.voice_id: voice.name
for voice in sorted(voices, key=lambda v: v.name or "")
@@ -78,8 +77,13 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
try:
voices, _ = await get_voices_models(self.hass, user_input[CONF_API_KEY])
except ApiError:
errors["base"] = "invalid_api_key"
except ApiError as exc:
errors["base"] = "unknown"
details = getattr(exc, "body", {}).get("detail", {})
if details:
status = details.get("status")
if status == "invalid_api_key":
errors["base"] = "invalid_api_key"
else:
return self.async_create_entry(
title="ElevenLabs",
@@ -206,12 +210,6 @@ class ElevenLabsOptionsFlow(OptionsFlow):
vol.Coerce(float),
vol.Range(min=0, max=1),
),
vol.Optional(
CONF_OPTIMIZE_LATENCY,
default=self.config_entry.options.get(
CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
),
): vol.All(int, vol.Range(min=0, max=4)),
vol.Optional(
CONF_STYLE,
default=self.config_entry.options.get(CONF_STYLE, DEFAULT_STYLE),
@@ -7,7 +7,6 @@ CONF_MODEL = "model"
CONF_CONFIGURE_VOICE = "configure_voice"
CONF_STABILITY = "stability"
CONF_SIMILARITY = "similarity"
CONF_OPTIMIZE_LATENCY = "optimize_streaming_latency"
CONF_STYLE = "style"
CONF_USE_SPEAKER_BOOST = "use_speaker_boost"
DOMAIN = "elevenlabs"
@@ -15,6 +14,5 @@ DOMAIN = "elevenlabs"
DEFAULT_MODEL = "eleven_multilingual_v2"
DEFAULT_STABILITY = 0.5
DEFAULT_SIMILARITY = 0.75
DEFAULT_OPTIMIZE_LATENCY = 0
DEFAULT_STYLE = 0
DEFAULT_USE_SPEAKER_BOOST = True
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["elevenlabs"],
"requirements": ["elevenlabs==1.9.0"]
"requirements": ["elevenlabs==2.3.0"]
}
@@ -11,7 +11,8 @@
}
},
"error": {
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]"
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
"options": {
@@ -32,14 +33,12 @@
"data": {
"stability": "Stability",
"similarity": "Similarity",
"optimize_streaming_latency": "Latency",
"style": "Style",
"use_speaker_boost": "Speaker boost"
},
"data_description": {
"stability": "Stability of the generated audio. Higher values lead to less emotional audio.",
"similarity": "Similarity of the generated audio to the original voice. Higher values may result in more similar audio, but may also introduce background noise.",
"optimize_streaming_latency": "Optimize the model for streaming. This may reduce the quality of the generated audio.",
"style": "Style of the generated audio. Recommended to keep at 0 for most almost all use cases.",
"use_speaker_boost": "Use speaker boost to increase the similarity of the generated audio to the original voice."
}
@@ -25,13 +25,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import ElevenLabsConfigEntry
from .const import (
ATTR_MODEL,
CONF_OPTIMIZE_LATENCY,
CONF_SIMILARITY,
CONF_STABILITY,
CONF_STYLE,
CONF_USE_SPEAKER_BOOST,
CONF_VOICE,
DEFAULT_OPTIMIZE_LATENCY,
DEFAULT_SIMILARITY,
DEFAULT_STABILITY,
DEFAULT_STYLE,
@@ -75,9 +73,6 @@ async def async_setup_entry(
config_entry.entry_id,
config_entry.title,
voice_settings,
config_entry.options.get(
CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY
),
)
]
)
@@ -98,7 +93,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
entry_id: str,
title: str,
voice_settings: VoiceSettings,
latency: int = 0,
) -> None:
"""Init ElevenLabs TTS service."""
self._client = client
@@ -115,7 +109,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
if voice_indices:
self._voices.insert(0, self._voices.pop(voice_indices[0]))
self._voice_settings = voice_settings
self._latency = latency

# Entity attributes
self._attr_unique_id = entry_id
@@ -144,14 +137,14 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
voice_id = options.get(ATTR_VOICE, self._default_voice_id)
model = options.get(ATTR_MODEL, self._model.model_id)
try:
audio = await self._client.generate(
audio = self._client.text_to_speech.convert(
text=message,
voice=voice_id,
optimize_streaming_latency=self._latency,
voice_id=voice_id,
voice_settings=self._voice_settings,
model=model,
model_id=model,
)
bytes_combined = b"".join([byte_seg async for byte_seg in audio])

except ApiError as exc:
_LOGGER.warning(
"Error during processing of TTS request %s", exc, exc_info=True
@@ -113,9 +113,7 @@ class HomematicipHAP:

self._ws_close_requested = False
self._ws_connection_closed = asyncio.Event()
self._retry_task: asyncio.Task | None = None
self._tries = 0
self._accesspoint_connected = True
self._get_state_task: asyncio.Task | None = None
self.hmip_device_by_entity_id: dict[str, Any] = {}
self.reset_connection_listener: Callable | None = None

@@ -161,17 +159,8 @@
"""
if not self.home.connected:
_LOGGER.error("HMIP access point has lost connection with the cloud")
self._accesspoint_connected = False
self._ws_connection_closed.set()
self.set_all_to_unavailable()
elif not self._accesspoint_connected:
# Now the HOME_CHANGED event has fired indicating the access
# point has reconnected to the cloud again.
# Explicitly getting an update as entity states might have
# changed during access point disconnect."""

job = self.hass.async_create_task(self.get_state())
job.add_done_callback(self.get_state_finished)
self._accesspoint_connected = True

@callback
def async_create_entity(self, *args, **kwargs) -> None:
@@ -185,20 +174,43 @@
await asyncio.sleep(30)
await self.hass.config_entries.async_reload(self.config_entry.entry_id)

async def _try_get_state(self) -> None:
"""Call get_state in a loop until no error occurs, using exponential backoff on error."""

# Wait until WebSocket connection is established.
while not self.home.websocket_is_connected():
await asyncio.sleep(2)

delay = 8
max_delay = 1500
while True:
try:
await self.get_state()
break
except HmipConnectionError as err:
_LOGGER.warning(
"Get_state failed, retrying in %s seconds: %s", delay, err
)
await asyncio.sleep(delay)
delay = min(delay * 2, max_delay)

async def get_state(self) -> None:
"""Update HMIP state and tell Home Assistant."""
await self.home.get_current_state_async()
self.update_all()

def get_state_finished(self, future) -> None:
"""Execute when get_state coroutine has finished."""
"""Execute when try_get_state coroutine has finished."""
try:
future.result()
except HmipConnectionError:
# Somehow connection could not recover. Will disconnect and
# so reconnect loop is taking over.
_LOGGER.error("Updating state after HMIP access point reconnect failed")
self.hass.async_create_task(self.home.disable_events())
except Exception as err:  # noqa: BLE001
_LOGGER.error(
"Error updating state after HMIP access point reconnect: %s", err
)
else:
_LOGGER.info(
"Updating state after HMIP access point reconnect finished successfully",
)

def set_all_to_unavailable(self) -> None:
"""Set all devices to unavailable and tell Home Assistant."""
@@ -222,8 +234,8 @@
async def async_reset(self) -> bool:
"""Close the websocket connection."""
self._ws_close_requested = True
if self._retry_task is not None:
self._retry_task.cancel()
if self._get_state_task is not None:
self._get_state_task.cancel()
await self.home.disable_events_async()
_LOGGER.debug("Closed connection to HomematicIP cloud server")
await self.hass.config_entries.async_unload_platforms(
@@ -247,7 +259,9 @@
"""Handle websocket connected."""
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
if self._ws_connection_closed.is_set():
await self.get_state()
self._get_state_task = self.hass.async_create_task(self._try_get_state())
self._get_state_task.add_done_callback(self.get_state_finished)

self._ws_connection_closed.clear()

async def ws_disconnected_handler(self) -> None:
@@ -256,11 +270,12 @@
self._ws_connection_closed.set()

async def ws_reconnected_handler(self, reason: str) -> None:
"""Handle websocket reconnection."""
"""Handle websocket reconnection. Is called when Websocket tries to reconnect."""
_LOGGER.info(
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
"Websocket connection to HomematicIP Cloud trying to reconnect due to reason: %s",
reason,
)

self._ws_connection_closed.set()

async def get_hap(
@@ -2,13 +2,20 @@

from __future__ import annotations

import logging
from typing import Any

from homematicip.base.enums import DeviceType, OpticalSignalBehaviour, RGBColorState
from homematicip.base.enums import (
DeviceType,
FunctionalChannelType,
OpticalSignalBehaviour,
RGBColorState,
)
from homematicip.base.functionalChannels import NotificationLightChannel
from homematicip.device import (
BrandDimmer,
BrandSwitchNotificationLight,
Device,
Dimmer,
DinRailDimmer3,
FullFlushDimmer,
@@ -34,6 +41,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .entity import HomematicipGenericEntity
from .hap import HomematicIPConfigEntry, HomematicipHAP

_logger = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,
@@ -43,6 +52,14 @@ async def async_setup_entry(
"""Set up the HomematicIP Cloud lights from a config entry."""
hap = config_entry.runtime_data
entities: list[HomematicipGenericEntity] = []

entities.extend(
HomematicipLightHS(hap, d, ch.index)
for d in hap.home.devices
for ch in d.functionalChannels
if ch.functionalChannelType == FunctionalChannelType.UNIVERSAL_LIGHT_CHANNEL
)

for device in hap.home.devices:
if (
isinstance(device, SwitchMeasuring)
@@ -104,6 +121,64 @@ class HomematicipLight(HomematicipGenericEntity, LightEntity):
await self._device.turn_off_async()


class HomematicipLightHS(HomematicipGenericEntity, LightEntity):
"""Representation of the HomematicIP light with HS color mode."""

_attr_color_mode = ColorMode.HS
_attr_supported_color_modes = {ColorMode.HS}

def __init__(self, hap: HomematicipHAP, device: Device, channel_index: int) -> None:
"""Initialize the light entity."""
super().__init__(hap, device, channel=channel_index, is_multi_channel=True)

@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self.functional_channel.on

@property
def brightness(self) -> int | None:
"""Return the current brightness."""
return int(self.functional_channel.dimLevel * 255.0)

@property
def hs_color(self) -> tuple[float, float] | None:
"""Return the hue and saturation color value [float, float]."""
if (
self.functional_channel.hue is None
or self.functional_channel.saturationLevel is None
):
return None
return (
self.functional_channel.hue,
self.functional_channel.saturationLevel * 100.0,
)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the light on."""

hs_color = kwargs.get(ATTR_HS_COLOR, (0.0, 0.0))
hue = hs_color[0] % 360.0
saturation = hs_color[1] / 100.0
dim_level = round(kwargs.get(ATTR_BRIGHTNESS, 255) / 255.0, 2)

if ATTR_HS_COLOR not in kwargs:
hue = self.functional_channel.hue
saturation = self.functional_channel.saturationLevel

if ATTR_BRIGHTNESS not in kwargs:
# If no brightness is set, use the current brightness
dim_level = self.functional_channel.dimLevel or 1.0

await self.functional_channel.set_hue_saturation_dim_level_async(
hue=hue, saturation_level=saturation, dim_level=dim_level
)

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the light off."""
await self.functional_channel.set_switch_state_async(on=False)


class HomematicipLightMeasuring(HomematicipLight):
"""Representation of the HomematicIP measuring light."""
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"requirements": ["homematicip==2.0.6"]
"requirements": ["homematicip==2.0.7"]
}
@@ -18,6 +18,9 @@ from homematicip.device import (
PrintedCircuitBoardSwitch2,
PrintedCircuitBoardSwitchBattery,
SwitchMeasuring,
WiredInput32,
WiredInputSwitch6,
WiredSwitch4,
WiredSwitch8,
)
from homematicip.group import ExtendedLinkedSwitchingGroup, SwitchingGroup
@@ -51,6 +54,7 @@ async def async_setup_entry(
elif isinstance(
device,
(
WiredSwitch4,
WiredSwitch8,
OpenCollector8Module,
BrandSwitch2,
@@ -60,6 +64,8 @@ async def async_setup_entry(
MotionDetectorSwitchOutdoor,
DinRailSwitch,
DinRailSwitch4,
WiredInput32,
WiredInputSwitch6,
),
):
channel_indices = [
@@ -11,9 +11,9 @@ from aiopvapi.shades import Shades
from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er

from .const import DOMAIN, HUB_EXCEPTIONS
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
@@ -64,6 +64,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) ->
)
return False

# manual registration of the hub
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, hub.mac_address)},
identifiers={(DOMAIN, hub.serial_number)},
manufacturer=MANUFACTURER,
name=hub.name,
model=hub.model,
sw_version=hub.firmware,
hw_version=hub.main_processor_version.name,
)

try:
rooms = Rooms(pv_request)
room_data: PowerviewData = await rooms.get_rooms()
@@ -112,16 +112,8 @@ class HuumDevice(ClimateEntity):
await self._turn_on(temperature)

async def async_update(self) -> None:
"""Get the latest status data.

We get the latest status first from the status endpoints of the sauna.
If that data does not include the temperature, that means that the sauna
is off, we then call the off command which will in turn return the temperature.
This is a workaround for getting the temperature as the Huum API does not
return the target temperature of a sauna that is off, even if it can have
a target temperature at that time.
"""
self._status = await self._huum_handler.status_from_status_or_stop()
"""Get the latest status data."""
self._status = await self._huum_handler.status()
if self._target_temperature is None or self.hvac_mode == HVACMode.HEAT:
self._target_temperature = self._status.target_temperature
@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/huum",
"iot_class": "cloud_polling",
"requirements": ["huum==0.7.12"]
"requirements": ["huum==0.8.0"]
}
@@ -2,6 +2,7 @@

from __future__ import annotations

import logging
from typing import Any

from homeassistant.components.media_player import (
@@ -21,6 +22,8 @@ from .const import CONTENT_TYPE_MAP, LOGGER, MAX_IMAGE_WIDTH
from .coordinator import JellyfinConfigEntry, JellyfinDataUpdateCoordinator
from .entity import JellyfinClientEntity

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,
@@ -177,10 +180,15 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
def supported_features(self) -> MediaPlayerEntityFeature:
"""Flag media player features that are supported."""
commands: list[str] = self.capabilities.get("SupportedCommands", [])
controllable = self.capabilities.get("SupportsMediaControl", False)
_LOGGER.debug(
"Supported commands for device %s, client %s, %s",
self.device_name,
self.client_name,
commands,
)
features = MediaPlayerEntityFeature(0)

if controllable:
if "PlayMediaSource" in commands:
features |= (
MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.PLAY_MEDIA
@@ -2,7 +2,6 @@

from __future__ import annotations

from abc import ABC, abstractmethod
from collections.abc import AsyncGenerator
from typing import Any, Final, Literal

@@ -20,8 +19,8 @@ from xknx.io.util import validate_ip as xknx_validate_ip
from xknx.secure.keyring import Keyring, XMLInterface

from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
ConfigEntry,
ConfigEntryBaseFlow,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
@@ -103,12 +102,14 @@ _PORT_SELECTOR = vol.All(
)


class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
"""Base class for KNX flows."""
class KNXConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a KNX config flow."""

def __init__(self, initial_data: KNXConfigEntryData) -> None:
"""Initialize KNXCommonFlow."""
self.initial_data = initial_data
VERSION = 1

def __init__(self) -> None:
"""Initialize KNX config flow."""
self.initial_data = DEFAULT_ENTRY_DATA
self.new_entry_data = KNXConfigEntryData()
self.new_title: str | None = None

@@ -121,19 +122,21 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
self._gatewayscanner: GatewayScanner | None = None
self._async_scan_gen: AsyncGenerator[GatewayDescriptor] | None = None

@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> KNXOptionsFlow:
"""Get the options flow for this handler."""
return KNXOptionsFlow(config_entry)

@property
def _xknx(self) -> XKNX:
"""Return XKNX instance."""
if isinstance(self, OptionsFlow) and (
if (self.source == SOURCE_RECONFIGURE) and (
knx_module := self.hass.data.get(KNX_MODULE_KEY)
):
return knx_module.xknx
return XKNX()

@abstractmethod
def finish_flow(self) -> ConfigFlowResult:
"""Finish the flow."""

@property
def connection_type(self) -> str:
"""Return the configured connection type."""
@@ -150,6 +153,61 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA),
)

@callback
def finish_flow(self) -> ConfigFlowResult:
"""Create or update the ConfigEntry."""
if self.source == SOURCE_RECONFIGURE:
entry = self._get_reconfigure_entry()
_tunnel_endpoint_str = self.initial_data.get(
CONF_KNX_TUNNEL_ENDPOINT_IA, "Tunneling"
)
if self.new_title and not entry.title.startswith(
# Overwrite standard titles, but not user defined ones
(
f"KNX {self.initial_data[CONF_KNX_CONNECTION_TYPE]}",
CONF_KNX_AUTOMATIC.capitalize(),
"Tunneling @ ",
f"{_tunnel_endpoint_str} @",
"Tunneling UDP @ ",
"Tunneling TCP @ ",
"Secure Tunneling",
"Routing as ",
"Secure Routing as ",
)
):
self.new_title = None
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data_updates=self.new_entry_data,
title=self.new_title or UNDEFINED,
)

title = self.new_title or f"KNX {self.new_entry_data[CONF_KNX_CONNECTION_TYPE]}"
return self.async_create_entry(
title=title,
data=DEFAULT_ENTRY_DATA | self.new_entry_data,
)

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
return await self.async_step_connection_type()

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of existing entry."""
entry = self._get_reconfigure_entry()
self.initial_data = dict(entry.data)  # type: ignore[assignment]
return self.async_show_menu(
step_id="reconfigure",
menu_options=[
"connection_type",
"secure_knxkeys",
],
)

async def async_step_connection_type(
self, user_input: dict | None = None
) -> ConfigFlowResult:
@@ -441,7 +499,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
)
ip_address: str | None
if (  # initial attempt on ConfigFlow or coming from automatic / routing
(isinstance(self, ConfigFlow) or not _reconfiguring_existing_tunnel)
not _reconfiguring_existing_tunnel
and not user_input
and self._selected_tunnel is not None
):  # default to first found tunnel
@@ -841,52 +899,20 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow):
)


class KNXConfigFlow(KNXCommonFlow, ConfigFlow, domain=DOMAIN):
"""Handle a KNX config flow."""

VERSION = 1

def __init__(self) -> None:
"""Initialize KNX options flow."""
super().__init__(initial_data=DEFAULT_ENTRY_DATA)

@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> KNXOptionsFlow:
"""Get the options flow for this handler."""
return KNXOptionsFlow(config_entry)

@callback
def finish_flow(self) -> ConfigFlowResult:
"""Create the ConfigEntry."""
title = self.new_title or f"KNX {self.new_entry_data[CONF_KNX_CONNECTION_TYPE]}"
return self.async_create_entry(
title=title,
data=DEFAULT_ENTRY_DATA | self.new_entry_data,
)

async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
return await self.async_step_connection_type()


class KNXOptionsFlow(KNXCommonFlow, OptionsFlow):
class KNXOptionsFlow(OptionsFlow):
"""Handle KNX options."""

general_settings: dict

def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize KNX options flow."""
super().__init__(initial_data=config_entry.data)  # type: ignore[arg-type]
self.initial_data = dict(config_entry.data)

@callback
def finish_flow(self) -> ConfigFlowResult:
def finish_flow(self, new_entry_data: KNXConfigEntryData) -> ConfigFlowResult:
"""Update the ConfigEntry and finish the flow."""
new_data = DEFAULT_ENTRY_DATA | self.initial_data | self.new_entry_data
new_data = self.initial_data | new_entry_data
self.hass.config_entries.async_update_entry(
self.config_entry,
data=new_data,
title=self.new_title or UNDEFINED,
)
return self.async_create_entry(title="", data={})

@@ -894,26 +920,20 @@ class KNXOptionsFlow(KNXCommonFlow, OptionsFlow):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage KNX options."""
return self.async_show_menu(
step_id="init",
menu_options=[
"connection_type",
"communication_settings",
"secure_knxkeys",
],
)
return await self.async_step_communication_settings()

async def async_step_communication_settings(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage KNX communication settings."""
if user_input is not None:
self.new_entry_data = KNXConfigEntryData(
state_updater=user_input[CONF_KNX_STATE_UPDATER],
rate_limit=user_input[CONF_KNX_RATE_LIMIT],
telegram_log_size=user_input[CONF_KNX_TELEGRAM_LOG_SIZE],
return self.finish_flow(
KNXConfigEntryData(
state_updater=user_input[CONF_KNX_STATE_UPDATER],
rate_limit=user_input[CONF_KNX_RATE_LIMIT],
telegram_log_size=user_input[CONF_KNX_TELEGRAM_LOG_SIZE],
)
)
return self.finish_flow()

data_schema = {
vol.Required(
@ -104,7 +104,7 @@ rules:
|
||||
Since all entities are configured manually, names are user-defined.
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
|
@ -1,6 +1,13 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"reconfigure": {
|
||||
"title": "KNX connection settings",
|
||||
"menu_options": {
|
||||
"connection_type": "Reconfigure KNX connection",
|
||||
"secure_knxkeys": "Import KNX keyring file"
|
||||
}
|
||||
},
|
||||
"connection_type": {
|
||||
"title": "KNX connection",
|
||||
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.)\n\n'Tunneling' will connect to a specific KNX IP interface over a tunnel.\n\n'Routing' will use Multicast to communicate with KNX IP routers.",
|
||||
@ -65,7 +72,7 @@
|
||||
},
|
||||
"secure_knxkeys": {
|
||||
"title": "Import KNX Keyring",
|
||||
"description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
|
||||
"description": "The keyring is used to encrypt and decrypt KNX IP Secure communication. You can import a new keyring file or re-import to update existing keys if your configuration has changed.",
|
||||
"data": {
|
||||
"knxkeys_file": "Keyring file",
|
||||
"knxkeys_password": "Keyring password"
|
||||
@ -129,6 +136,9 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_backbone_key": "Invalid backbone key. 32 hexadecimal digits expected.",
|
||||
@ -159,16 +169,8 @@
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "KNX Settings",
|
||||
"menu_options": {
|
||||
"connection_type": "Configure KNX interface",
|
||||
"communication_settings": "Communication settings",
|
||||
"secure_knxkeys": "Import a `.knxkeys` file"
|
||||
}
|
||||
},
|
||||
"communication_settings": {
|
||||
"title": "[%key:component::knx::options::step::init::menu_options::communication_settings%]",
|
||||
"title": "Communication settings",
|
||||
"data": {
|
||||
"state_updater": "State updater",
|
||||
"rate_limit": "Rate limit",
|
||||
@ -179,147 +181,7 @@
|
||||
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
|
||||
"telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
|
||||
}
|
||||
},
|
||||
"connection_type": {
|
||||
"title": "[%key:component::knx::config::step::connection_type::title%]",
|
||||
"description": "[%key:component::knx::config::step::connection_type::description%]",
|
||||
"data": {
|
||||
"connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
|
||||
},
|
||||
"data_description": {
|
||||
"connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
|
||||
}
|
||||
},
|
||||
"tunnel": {
|
||||
"title": "[%key:component::knx::config::step::tunnel::title%]",
|
||||
"data": {
|
||||
"gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
|
||||
},
|
||||
"data_description": {
|
||||
"gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
|
||||
}
|
||||
},
|
||||
"tcp_tunnel_endpoint": {
|
||||
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
|
||||
"data": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
|
||||
}
|
||||
},
|
||||
"manual_tunnel": {
|
||||
"title": "[%key:component::knx::config::step::manual_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::manual_tunnel::description%]",
|
||||
"data": {
|
||||
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data::tunneling_type%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"route_back": "[%key:component::knx::config::step::manual_tunnel::data::route_back%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
|
||||
"port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
|
||||
"host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
|
||||
"route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_tunnel": {
|
||||
"title": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::description%]",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_knxkeys%]",
|
||||
"secure_tunnel_manual": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_tunnel_manual%]"
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_routing": {
|
||||
"title": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::description%]",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_knxkeys%]",
|
||||
"secure_routing_manual": "[%key:component::knx::config::step::secure_key_source_menu_routing::menu_options::secure_routing_manual%]"
|
||||
}
|
||||
},
|
||||
"secure_knxkeys": {
|
||||
"title": "[%key:component::knx::config::step::secure_knxkeys::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_knxkeys::description%]",
|
||||
"data": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
|
||||
}
|
||||
},
|
||||
"knxkeys_tunnel_select": {
|
||||
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
|
||||
"data": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
|
||||
}
|
||||
},
|
||||
"secure_tunnel_manual": {
|
||||
"title": "[%key:component::knx::config::step::secure_tunnel_manual::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_tunnel_manual::description%]",
|
||||
"data": {
|
||||
"user_id": "[%key:component::knx::config::step::secure_tunnel_manual::data::user_id%]",
|
||||
"user_password": "[%key:component::knx::config::step::secure_tunnel_manual::data::user_password%]",
|
||||
"device_authentication": "[%key:component::knx::config::step::secure_tunnel_manual::data::device_authentication%]"
|
||||
},
|
||||
"data_description": {
|
||||
"user_id": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::user_id%]",
|
||||
"user_password": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::user_password%]",
|
||||
"device_authentication": "[%key:component::knx::config::step::secure_tunnel_manual::data_description::device_authentication%]"
|
||||
}
|
||||
},
|
||||
"secure_routing_manual": {
|
||||
"title": "[%key:component::knx::config::step::secure_routing_manual::title%]",
|
||||
"description": "[%key:component::knx::config::step::secure_tunnel_manual::description%]",
|
||||
"data": {
|
||||
"backbone_key": "[%key:component::knx::config::step::secure_routing_manual::data::backbone_key%]",
|
||||
"sync_latency_tolerance": "[%key:component::knx::config::step::secure_routing_manual::data::sync_latency_tolerance%]"
|
||||
},
|
||||
"data_description": {
|
||||
"backbone_key": "[%key:component::knx::config::step::secure_routing_manual::data_description::backbone_key%]",
|
||||
"sync_latency_tolerance": "[%key:component::knx::config::step::secure_routing_manual::data_description::sync_latency_tolerance%]"
|
||||
}
|
||||
},
|
||||
"routing": {
|
||||
"title": "[%key:component::knx::config::step::routing::title%]",
|
||||
"description": "[%key:component::knx::config::step::routing::description%]",
|
||||
"data": {
|
||||
"individual_address": "[%key:component::knx::config::step::routing::data::individual_address%]",
|
||||
"routing_secure": "[%key:component::knx::config::step::routing::data::routing_secure%]",
|
||||
"multicast_group": "[%key:component::knx::config::step::routing::data::multicast_group%]",
|
||||
"multicast_port": "[%key:component::knx::config::step::routing::data::multicast_port%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
|
||||
},
|
||||
"data_description": {
|
||||
"individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
|
||||
"routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
|
||||
"multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
|
||||
"multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
|
||||
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_backbone_key": "[%key:component::knx::config::error::invalid_backbone_key%]",
|
||||
"invalid_individual_address": "[%key:component::knx::config::error::invalid_individual_address%]",
|
||||
"invalid_ip_address": "[%key:component::knx::config::error::invalid_ip_address%]",
|
||||
"keyfile_no_backbone_key": "[%key:component::knx::config::error::keyfile_no_backbone_key%]",
|
||||
"keyfile_invalid_signature": "[%key:component::knx::config::error::keyfile_invalid_signature%]",
|
||||
"keyfile_no_tunnel_for_host": "[%key:component::knx::config::error::keyfile_no_tunnel_for_host%]",
|
||||
"keyfile_not_found": "[%key:component::knx::config::error::keyfile_not_found%]",
|
||||
"no_router_discovered": "[%key:component::knx::config::error::no_router_discovered%]",
|
||||
"no_tunnel_discovered": "[%key:component::knx::config::error::no_tunnel_discovered%]",
|
||||
"unsupported_tunnel_type": "[%key:component::knx::config::error::unsupported_tunnel_type%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
@ -2,9 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Awaitable, Callable
|
||||
from functools import wraps
|
||||
import inspect
|
||||
from typing import TYPE_CHECKING, Any, Final, overload
|
||||
|
||||
import knx_frontend as knx_panel
|
||||
@ -116,7 +116,7 @@ def provide_knx(
|
||||
"KNX integration not loaded.",
|
||||
)
|
||||
|
||||
if asyncio.iscoroutinefunction(func):
|
||||
if inspect.iscoroutinefunction(func):
|
||||
|
||||
@wraps(func)
|
||||
async def with_knx(
|
||||
|
@ -16,6 +16,11 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
@staticmethod
|
||||
def get_unique_id(device_id: str, description: EntityDescription) -> str:
|
||||
"""Generate a unique ID for the entity."""
|
||||
return f"{device_id}-{description.key}"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
@ -26,7 +31,7 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
|
||||
super().__init__(coordinator)
|
||||
self._device_id = device_id
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{device_id}-{description.key}"
|
||||
self._attr_unique_id = MieleEntity.get_unique_id(device_id, description)
|
||||
|
||||
device = self.device
|
||||
appliance_type = DEVICE_TYPE_TAGS.get(MieleAppliance(device.device_type))
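A brief illustrative sketch of the unique-ID scheme introduced above (hypothetical device ID; not integration code). The base format comes from MieleEntity.get_unique_id, and plate sensors later append their zone through a unique_id_fn override:

def get_unique_id(device_id: str, key: str) -> str:
    # shared "<device>-<description key>" format
    return f"{device_id}-{key}"

def plate_unique_id(device_id: str, key: str, zone: int) -> str:
    # plate sensors additionally encode the zone
    return f"{get_unique_id(device_id, key)}-{zone}"

print(get_unique_id("000123456789", "state_temperature_1"))    # 000123456789-state_temperature_1
print(plate_unique_id("000123456789", "state_plate_step", 2))  # 000123456789-state_plate_step-2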
|
||||
|
@ -7,7 +7,7 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Final, cast
|
||||
|
||||
from pymiele import MieleDevice
|
||||
from pymiele import MieleDevice, MieleTemperature
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@ -25,10 +25,13 @@ from homeassistant.const import (
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .const import (
|
||||
DISABLED_TEMP_ENTITIES,
|
||||
DOMAIN,
|
||||
STATE_PROGRAM_ID,
|
||||
STATE_PROGRAM_PHASE,
|
||||
STATE_STATUS_TAGS,
|
||||
@ -45,8 +48,6 @@ PARALLEL_UPDATES = 0
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DISABLED_TEMPERATURE = -32768
|
||||
|
||||
DEFAULT_PLATE_COUNT = 4
|
||||
|
||||
PLATE_COUNT = {
|
||||
@ -75,12 +76,25 @@ def _convert_duration(value_list: list[int]) -> int | None:
|
||||
return value_list[0] * 60 + value_list[1] if value_list else None
|
||||
|
||||
|
||||
def _convert_temperature(
    value_list: list[MieleTemperature], index: int
) -> float | None:
    """Convert temperature object to readable value."""
    if index >= len(value_list):
        return None
    raw_value = cast(int, value_list[index].temperature) / 100.0
    if raw_value in DISABLED_TEMP_ENTITIES:
        return None
    return raw_value
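For reference, a minimal self-contained sketch of the conversion rule this helper implements; the FakeTemperature stub and the sentinel value below are assumptions for illustration, not the pymiele API:

from dataclasses import dataclass

@dataclass
class FakeTemperature:
    temperature: int  # hundredths of a degree Celsius, as the Miele API reports it

DISABLED_SENTINELS = {-327.68}  # stands in for DISABLED_TEMP_ENTITIES from const.py

def convert_temperature(values: list[FakeTemperature], index: int) -> float | None:
    """Return degrees Celsius, or None for missing or disabled zones."""
    if index >= len(values):
        return None
    raw = values[index].temperature / 100.0
    return None if raw in DISABLED_SENTINELS else raw

print(convert_temperature([FakeTemperature(2150)], 0))    # 21.5
print(convert_temperature([FakeTemperature(-32768)], 0))  # None (zone disabled)
print(convert_temperature([FakeTemperature(2150)], 1))    # None (zone not reported)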
@dataclass(frozen=True, kw_only=True)
|
||||
class MieleSensorDescription(SensorEntityDescription):
|
||||
"""Class describing Miele sensor entities."""
|
||||
|
||||
value_fn: Callable[[MieleDevice], StateType]
|
||||
zone: int = 1
|
||||
zone: int | None = None
|
||||
unique_id_fn: Callable[[str, MieleSensorDescription], str] | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -404,32 +418,20 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_temperature_1",
|
||||
zone=1,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda value: cast(int, value.state_temperatures[0].temperature)
|
||||
/ 100.0,
|
||||
value_fn=lambda value: _convert_temperature(value.state_temperatures, 0),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.FRIDGE,
|
||||
MieleAppliance.FREEZER,
|
||||
MieleAppliance.FRIDGE_FREEZER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.WINE_CABINET,
|
||||
MieleAppliance.WINE_CONDITIONING_UNIT,
|
||||
MieleAppliance.WINE_STORAGE_CONDITIONING_UNIT,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.WINE_CABINET_FREEZER,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_temperature_2",
|
||||
@ -438,7 +440,24 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
translation_key="temperature_zone_2",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda value: value.state_temperatures[1].temperature / 100.0, # type: ignore [operator]
|
||||
value_fn=lambda value: _convert_temperature(value.state_temperatures, 1),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WINE_CABINET,
|
||||
MieleAppliance.WINE_CONDITIONING_UNIT,
|
||||
MieleAppliance.WINE_STORAGE_CONDITIONING_UNIT,
|
||||
MieleAppliance.WINE_CABINET_FREEZER,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_temperature_3",
|
||||
zone=3,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
translation_key="temperature_zone_3",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda value: _convert_temperature(value.state_temperatures, 2),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
@ -454,11 +473,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=(
|
||||
lambda value: cast(
|
||||
int, value.state_core_target_temperature[0].temperature
|
||||
)
|
||||
/ 100.0
|
||||
value_fn=lambda value: _convert_temperature(
|
||||
value.state_core_target_temperature, 0
|
||||
),
|
||||
),
|
||||
),
|
||||
@ -479,9 +495,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=(
|
||||
lambda value: cast(int, value.state_target_temperature[0].temperature)
|
||||
/ 100.0
|
||||
value_fn=lambda value: _convert_temperature(
|
||||
value.state_target_temperature, 0
|
||||
),
|
||||
),
|
||||
),
|
||||
@ -497,9 +512,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=(
|
||||
lambda value: cast(int, value.state_core_temperature[0].temperature)
|
||||
/ 100.0
|
||||
value_fn=lambda value: _convert_temperature(
|
||||
value.state_core_temperature, 0
|
||||
),
|
||||
),
|
||||
),
|
||||
@ -518,6 +532,8 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=sorted(PlatePowerStep.keys()),
|
||||
value_fn=lambda value: None,
|
||||
unique_id_fn=lambda device_id,
|
||||
description: f"{device_id}-{description.key}-{description.zone}",
|
||||
),
|
||||
)
|
||||
for i in range(1, 7)
|
||||
@ -539,6 +555,16 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
options=sorted(StateDryingStep.keys()),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(MieleAppliance.ROBOT_VACUUM_CLEANER,),
|
||||
description=MieleSensorDescription(
|
||||
key="state_battery",
|
||||
value_fn=lambda value: value.state_battery_level,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@ -549,10 +575,52 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the sensor platform."""
|
||||
coordinator = config_entry.runtime_data
|
||||
added_devices: set[str] = set()
|
||||
added_devices: set[str] = set() # device_id
|
||||
added_entities: set[str] = set() # unique_id
|
||||
|
||||
def _async_add_new_devices() -> None:
|
||||
nonlocal added_devices
|
||||
def _get_entity_class(definition: MieleSensorDefinition) -> type[MieleSensor]:
|
||||
"""Get the entity class for the sensor."""
|
||||
return {
|
||||
"state_status": MieleStatusSensor,
|
||||
"state_program_id": MieleProgramIdSensor,
|
||||
"state_program_phase": MielePhaseSensor,
|
||||
"state_plate_step": MielePlateSensor,
|
||||
}.get(definition.description.key, MieleSensor)
|
||||
|
||||
def _is_entity_registered(unique_id: str) -> bool:
|
||||
"""Check if the entity is already registered."""
|
||||
entity_registry = er.async_get(hass)
|
||||
return any(
|
||||
entry.platform == DOMAIN and entry.unique_id == unique_id
|
||||
for entry in entity_registry.entities.values()
|
||||
)
|
||||
|
||||
def _is_sensor_enabled(
|
||||
definition: MieleSensorDefinition,
|
||||
device: MieleDevice,
|
||||
unique_id: str,
|
||||
) -> bool:
|
||||
"""Check if the sensor is enabled."""
|
||||
if (
|
||||
definition.description.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and definition.description.value_fn(device) is None
|
||||
and definition.description.zone != 1
|
||||
):
|
||||
# all appliances supporting temperature have at least zone 1, for other zones
|
||||
# don't create entity if API signals that datapoint is disabled, unless the sensor
|
||||
# already appeared in the past (= it provided a valid value)
|
||||
return _is_entity_registered(unique_id)
|
||||
if (
|
||||
definition.description.key == "state_plate_step"
|
||||
and definition.description.zone is not None
|
||||
and definition.description.zone > _get_plate_count(device.tech_type)
|
||||
):
|
||||
# don't create plate entity if not expected by the appliance tech type
|
||||
return False
|
||||
return True
|
||||
|
||||
def _async_add_devices() -> None:
|
||||
nonlocal added_devices, added_entities
|
||||
entities: list = []
|
||||
entity_class: type[MieleSensor]
|
||||
new_devices_set, current_devices = coordinator.async_add_devices(added_devices)
|
||||
@ -560,40 +628,35 @@ async def async_setup_entry(
|
||||
|
||||
for device_id, device in coordinator.data.devices.items():
|
||||
for definition in SENSOR_TYPES:
|
||||
if (
|
||||
device_id in new_devices_set
|
||||
and device.device_type in definition.types
|
||||
):
|
||||
match definition.description.key:
|
||||
case "state_status":
|
||||
entity_class = MieleStatusSensor
|
||||
case "state_program_id":
|
||||
entity_class = MieleProgramIdSensor
|
||||
case "state_program_phase":
|
||||
entity_class = MielePhaseSensor
|
||||
case "state_plate_step":
|
||||
entity_class = MielePlateSensor
|
||||
case _:
|
||||
entity_class = MieleSensor
|
||||
if (
|
||||
definition.description.device_class
|
||||
== SensorDeviceClass.TEMPERATURE
|
||||
and definition.description.value_fn(device)
|
||||
== DISABLED_TEMPERATURE / 100
|
||||
) or (
|
||||
definition.description.key == "state_plate_step"
|
||||
and definition.description.zone
|
||||
> _get_plate_count(device.tech_type)
|
||||
):
|
||||
# Don't create entity if API signals that datapoint is disabled
|
||||
continue
|
||||
entities.append(
|
||||
entity_class(coordinator, device_id, definition.description)
|
||||
# device is not supported, skip
|
||||
if device.device_type not in definition.types:
|
||||
continue
|
||||
|
||||
entity_class = _get_entity_class(definition)
|
||||
unique_id = (
|
||||
definition.description.unique_id_fn(
|
||||
device_id, definition.description
|
||||
)
|
||||
if definition.description.unique_id_fn is not None
|
||||
else MieleEntity.get_unique_id(device_id, definition.description)
|
||||
)
|
||||
|
||||
# entity was already added, skip
|
||||
if device_id not in new_devices_set and unique_id in added_entities:
|
||||
continue
|
||||
|
||||
# sensor is not enabled, skip
|
||||
if not _is_sensor_enabled(definition, device, unique_id):
|
||||
continue
|
||||
|
||||
added_entities.add(unique_id)
|
||||
entities.append(
|
||||
entity_class(coordinator, device_id, definition.description)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_async_add_new_devices))
|
||||
_async_add_new_devices()
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_async_add_devices))
|
||||
_async_add_devices()
|
||||
|
||||
|
||||
APPLIANCE_ICONS = {
|
||||
@ -631,6 +694,17 @@ class MieleSensor(MieleEntity, SensorEntity):
|
||||
|
||||
entity_description: MieleSensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: MieleSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, device_id, description)
|
||||
if description.unique_id_fn is not None:
|
||||
self._attr_unique_id = description.unique_id_fn(device_id, description)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
@ -642,16 +716,6 @@ class MielePlateSensor(MieleSensor):
|
||||
|
||||
entity_description: MieleSensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: MieleSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the plate sensor."""
|
||||
super().__init__(coordinator, device_id, description)
|
||||
self._attr_unique_id = f"{device_id}-{description.key}-{description.zone}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the plate sensor."""
|
||||
@ -662,7 +726,7 @@ class MielePlateSensor(MieleSensor):
|
||||
cast(
|
||||
int,
|
||||
self.device.state_plate_step[
|
||||
self.entity_description.zone - 1
|
||||
cast(int, self.entity_description.zone) - 1
|
||||
].value_raw,
|
||||
)
|
||||
).name
|
||||
|
@ -87,7 +87,6 @@ class MieleVacuumStateCode(MieleEnum):
|
||||
|
||||
SUPPORTED_FEATURES = (
|
||||
VacuumEntityFeature.STATE
|
||||
| VacuumEntityFeature.BATTERY
|
||||
| VacuumEntityFeature.FAN_SPEED
|
||||
| VacuumEntityFeature.START
|
||||
| VacuumEntityFeature.STOP
|
||||
@ -174,11 +173,6 @@ class MieleVacuum(MieleEntity, StateVacuumEntity):
|
||||
MieleVacuumStateCode(self.device.state_program_phase).value
|
||||
)
|
||||
|
||||
@property
|
||||
def battery_level(self) -> int | None:
|
||||
"""Return the battery level."""
|
||||
return self.device.state_battery_level
|
||||
|
||||
@property
|
||||
def fan_speed(self) -> str | None:
|
||||
"""Return the fan speed."""
|
||||
|
@ -19,7 +19,7 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
|
||||
from .coordinator import NASwebCoordinator
|
||||
from .nasweb_data import NASwebData
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SWITCH]
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
NASWEB_CONFIG_URL = "https://{host}/page"
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""Constants for the NASweb integration."""
|
||||
|
||||
DOMAIN = "nasweb"
|
||||
KEY_TEMP_SENSOR = "temp_sensor"
|
||||
MANUFACTURER = "chomtech.pl"
|
||||
STATUS_UPDATE_MAX_TIME_INTERVAL = 60
|
||||
SUPPORT_EMAIL = "support@chomtech.eu"
|
||||
|
@ -11,16 +11,19 @@ from typing import Any
|
||||
|
||||
from aiohttp.web import Request, Response
|
||||
from webio_api import WebioAPI
|
||||
from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE
|
||||
from webio_api.const import KEY_DEVICE_SERIAL, KEY_TYPE, TYPE_STATUS_UPDATE
|
||||
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import event
|
||||
from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol
|
||||
|
||||
from .const import STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
from .const import KEY_TEMP_SENSOR, STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
KEY_INPUTS = "inputs"
|
||||
KEY_OUTPUTS = "outputs"
|
||||
|
||||
|
||||
class NotificationCoordinator:
|
||||
"""Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator."""
|
||||
@ -96,8 +99,11 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
self._job = HassJob(self._handle_max_update_interval, job_name)
|
||||
self._unsub_last_update_check: CALLBACK_TYPE | None = None
|
||||
self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
|
||||
data: dict[str, Any] = {}
|
||||
data[KEY_OUTPUTS] = self.webio_api.outputs
|
||||
data: dict[str, Any] = {
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
}
|
||||
self.async_set_updated_data(data)
|
||||
|
||||
def is_connection_confirmed(self) -> bool:
|
||||
@ -187,5 +193,9 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
async def process_status_update(self, new_status: dict) -> None:
|
||||
"""Process status update from NASweb."""
|
||||
self.webio_api.update_device_status(new_status)
|
||||
new_data = {KEY_OUTPUTS: self.webio_api.outputs}
|
||||
new_data = {
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
}
|
||||
self.async_set_updated_data(new_data)
|
||||
|
15
homeassistant/components/nasweb/icons.json
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"sensor_input": {
|
||||
"default": "mdi:help-circle-outline",
|
||||
"state": {
|
||||
"tamper": "mdi:lock-alert",
|
||||
"active": "mdi:alert",
|
||||
"normal": "mdi:shield-check-outline",
|
||||
"problem": "mdi:alert-circle"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
189
homeassistant/components/nasweb/sensor.py
Normal file
@ -0,0 +1,189 @@
|
||||
"""Platform for NASweb sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from webio_api import Input as NASwebInput, TempSensor
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
DOMAIN as DOMAIN_SENSOR,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
BaseCoordinatorEntity,
|
||||
BaseDataUpdateCoordinatorProtocol,
|
||||
)
|
||||
|
||||
from . import NASwebConfigEntry
|
||||
from .const import DOMAIN, KEY_TEMP_SENSOR, STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
|
||||
SENSOR_INPUT_TRANSLATION_KEY = "sensor_input"
|
||||
STATE_UNDEFINED = "undefined"
|
||||
STATE_TAMPER = "tamper"
|
||||
STATE_ACTIVE = "active"
|
||||
STATE_NORMAL = "normal"
|
||||
STATE_PROBLEM = "problem"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config: NASwebConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up Sensor platform."""
|
||||
coordinator = config.runtime_data
|
||||
current_inputs: set[int] = set()
|
||||
|
||||
@callback
|
||||
def _check_entities() -> None:
|
||||
received_inputs: dict[int, NASwebInput] = {
|
||||
entry.index: entry for entry in coordinator.webio_api.inputs
|
||||
}
|
||||
added = {i for i in received_inputs if i not in current_inputs}
|
||||
removed = {i for i in current_inputs if i not in received_inputs}
|
||||
entities_to_add: list[InputStateSensor] = []
|
||||
for index in added:
|
||||
webio_input = received_inputs[index]
|
||||
if not isinstance(webio_input, NASwebInput):
|
||||
_LOGGER.error("Cannot create InputStateSensor without NASwebInput")
|
||||
continue
|
||||
new_input = InputStateSensor(coordinator, webio_input)
|
||||
entities_to_add.append(new_input)
|
||||
current_inputs.add(index)
|
||||
async_add_entities(entities_to_add)
|
||||
entity_registry = er.async_get(hass)
|
||||
for index in removed:
|
||||
unique_id = f"{DOMAIN}.{config.unique_id}.input.{index}"
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
DOMAIN_SENSOR, DOMAIN, unique_id
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
current_inputs.remove(index)
|
||||
else:
|
||||
_LOGGER.warning("Failed to remove old input: no entity_id")
|
||||
|
||||
coordinator.async_add_listener(_check_entities)
|
||||
_check_entities()
|
||||
|
||||
nasweb_temp_sensor = coordinator.data[KEY_TEMP_SENSOR]
|
||||
temp_sensor = TemperatureSensor(coordinator, nasweb_temp_sensor)
|
||||
async_add_entities([temp_sensor])
|
||||
|
||||
|
||||
class BaseSensorEntity(SensorEntity, BaseCoordinatorEntity):
|
||||
"""Base class providing common functionality."""
|
||||
|
||||
def __init__(self, coordinator: BaseDataUpdateCoordinatorProtocol) -> None:
|
||||
"""Initialize base sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_available = False
|
||||
self._attr_has_entity_name = True
|
||||
self._attr_should_poll = False
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._handle_coordinator_update()
|
||||
|
||||
def _set_attr_available(
|
||||
self, entity_last_update: float, available: bool | None
|
||||
) -> None:
|
||||
if (
|
||||
self.coordinator.last_update is None
|
||||
or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
):
|
||||
self._attr_available = False
|
||||
else:
|
||||
self._attr_available = available if available is not None else False
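The availability rule above can be summarized with a small standalone helper (a sketch mirroring the logic, not the integration's code; the 60-second limit mirrors STATUS_UPDATE_MAX_TIME_INTERVAL in const.py):

import time

STATUS_UPDATE_MAX_TIME_INTERVAL = 60  # seconds

def is_available(
    last_coordinator_update: float | None,
    entity_last_update: float,
    reported_available: bool | None,
) -> bool:
    """Unavailable when push updates stopped arriving or the device reports otherwise."""
    if last_coordinator_update is None:
        return False
    if time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL:
        return False
    return bool(reported_available)

# A sensor last refreshed 120 s ago is stale even if the device still reports it as available.
print(is_available(time.time(), time.time() - 120, True))  # False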
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the entity.
|
||||
|
||||
Only used by the generic entity update service.
|
||||
Scheduling updates is not necessary; the coordinator handles updates via push notifications.
|
||||
"""
|
||||
|
||||
|
||||
class InputStateSensor(BaseSensorEntity):
|
||||
"""Entity representing NASweb input."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.ENUM
|
||||
_attr_options: list[str] = [
|
||||
STATE_UNDEFINED,
|
||||
STATE_TAMPER,
|
||||
STATE_ACTIVE,
|
||||
STATE_NORMAL,
|
||||
STATE_PROBLEM,
|
||||
]
|
||||
_attr_translation_key = SENSOR_INPUT_TRANSLATION_KEY
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BaseDataUpdateCoordinatorProtocol,
|
||||
nasweb_input: NASwebInput,
|
||||
) -> None:
|
||||
"""Initialize InputStateSensor entity."""
|
||||
super().__init__(coordinator)
|
||||
self._input = nasweb_input
|
||||
self._attr_native_value: str | None = None
|
||||
self._attr_translation_placeholders = {"index": f"{nasweb_input.index:2d}"}
|
||||
self._attr_unique_id = (
|
||||
f"{DOMAIN}.{self._input.webio_serial}.input.{self._input.index}"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._input.webio_serial)},
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
if self._input.state is None or self._input.state in self._attr_options:
|
||||
self._attr_native_value = self._input.state
|
||||
else:
|
||||
_LOGGER.warning("Received unrecognized input state: %s", self._input.state)
|
||||
self._attr_native_value = None
|
||||
self._set_attr_available(self._input.last_update, self._input.available)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class TemperatureSensor(BaseSensorEntity):
|
||||
"""Entity representing NASweb temperature sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TEMPERATURE
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BaseDataUpdateCoordinatorProtocol,
|
||||
nasweb_temp_sensor: TempSensor,
|
||||
) -> None:
|
||||
"""Initialize TemperatureSensor entity."""
|
||||
super().__init__(coordinator)
|
||||
self._temp_sensor = nasweb_temp_sensor
|
||||
self._attr_unique_id = f"{DOMAIN}.{self._temp_sensor.webio_serial}.temp_sensor"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._temp_sensor.webio_serial)}
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._attr_native_value = self._temp_sensor.value
|
||||
self._set_attr_available(
|
||||
self._temp_sensor.last_update, self._temp_sensor.available
|
||||
)
|
||||
self.async_write_ha_state()
|
@ -45,6 +45,18 @@
|
||||
"switch_output": {
|
||||
"name": "Relay Switch {index}"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"sensor_input": {
|
||||
"name": "Input {index}",
|
||||
"state": {
|
||||
"undefined": "Undefined",
|
||||
"tamper": "Tamper",
|
||||
"active": "Active",
|
||||
"normal": "Normal",
|
||||
"problem": "Problem"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -38,6 +38,7 @@ from .const import (
|
||||
ATTR_HEATING_POWER_REQUEST,
|
||||
ATTR_SCHEDULE_NAME,
|
||||
ATTR_SELECTED_SCHEDULE,
|
||||
ATTR_SELECTED_SCHEDULE_ID,
|
||||
ATTR_TARGET_TEMPERATURE,
|
||||
ATTR_TIME_PERIOD,
|
||||
DATA_SCHEDULES,
|
||||
@ -251,16 +252,22 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
|
||||
if data["event_type"] == EVENT_TYPE_SCHEDULE:
|
||||
# handle schedule change
|
||||
if "schedule_id" in data:
|
||||
selected_schedule = self.hass.data[DOMAIN][DATA_SCHEDULES][
|
||||
self.home.entity_id
|
||||
].get(data["schedule_id"])
|
||||
self._selected_schedule = getattr(
|
||||
self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
|
||||
data["schedule_id"]
|
||||
),
|
||||
selected_schedule,
|
||||
"name",
|
||||
None,
|
||||
)
|
||||
self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
|
||||
self._selected_schedule
|
||||
)
|
||||
|
||||
self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE_ID] = getattr(
|
||||
selected_schedule, "entity_id", None
|
||||
)
|
||||
|
||||
self.async_write_ha_state()
|
||||
self.data_handler.async_force_update(self._signal_name)
|
||||
# ignore other schedule events
|
||||
@ -420,12 +427,14 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
|
||||
self._attr_hvac_mode = HVAC_MAP_NETATMO[self._attr_preset_mode]
|
||||
self._away = self._attr_hvac_mode == HVAC_MAP_NETATMO[STATE_NETATMO_AWAY]
|
||||
|
||||
self._selected_schedule = getattr(
|
||||
self.home.get_selected_schedule(), "name", None
|
||||
)
|
||||
selected_schedule = self.home.get_selected_schedule()
|
||||
self._selected_schedule = getattr(selected_schedule, "name", None)
|
||||
self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
|
||||
self._selected_schedule
|
||||
)
|
||||
self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE_ID] = getattr(
|
||||
selected_schedule, "entity_id", None
|
||||
)
|
||||
|
||||
if self.device_type == NA_VALVE:
|
||||
self._attr_extra_state_attributes[ATTR_HEATING_POWER_REQUEST] = (
|
||||
|
@ -95,6 +95,7 @@ ATTR_PSEUDO = "pseudo"
|
||||
ATTR_SCHEDULE_ID = "schedule_id"
|
||||
ATTR_SCHEDULE_NAME = "schedule_name"
|
||||
ATTR_SELECTED_SCHEDULE = "selected_schedule"
|
||||
ATTR_SELECTED_SCHEDULE_ID = "selected_schedule_id"
|
||||
ATTR_TARGET_TEMPERATURE = "target_temperature"
|
||||
ATTR_TIME_PERIOD = "time_period"
|
||||
|
||||
|
@ -13,7 +13,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE, UnitOfTemperature
|
||||
from homeassistant.const import PERCENTAGE, UnitOfInformation, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@ -84,6 +84,8 @@ async def async_setup_entry(
|
||||
OctoPrintJobPercentageSensor(coordinator, device_id),
|
||||
OctoPrintEstimatedFinishTimeSensor(coordinator, device_id),
|
||||
OctoPrintStartTimeSensor(coordinator, device_id),
|
||||
OctoPrintFileNameSensor(coordinator, device_id),
|
||||
OctoPrintFileSizeSensor(coordinator, device_id),
|
||||
]
|
||||
|
||||
async_add_entities(entities)
|
||||
@ -262,3 +264,61 @@ class OctoPrintTemperatureSensor(OctoPrintSensorBase):
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return self.coordinator.last_update_success and self.coordinator.data["printer"]
|
||||
|
||||
|
||||
class OctoPrintFileNameSensor(OctoPrintSensorBase):
|
||||
"""Representation of an OctoPrint sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: OctoprintDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
) -> None:
|
||||
"""Initialize a new OctoPrint sensor."""
|
||||
super().__init__(coordinator, "Current File", device_id)
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return sensor state."""
|
||||
job: OctoprintJobInfo = self.coordinator.data["job"]
|
||||
|
||||
return job.job.file.name or None
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
if not self.coordinator.last_update_success:
|
||||
return False
|
||||
job: OctoprintJobInfo = self.coordinator.data["job"]
|
||||
return bool(job and job.job.file.name)
|
||||
|
||||
|
||||
class OctoPrintFileSizeSensor(OctoPrintSensorBase):
|
||||
"""Representation of an OctoPrint sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.DATA_SIZE
|
||||
_attr_native_unit_of_measurement = UnitOfInformation.BYTES
|
||||
_attr_suggested_unit_of_measurement = UnitOfInformation.MEGABYTES
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: OctoprintDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
) -> None:
|
||||
"""Initialize a new OctoPrint sensor."""
|
||||
super().__init__(coordinator, "Current File Size", device_id)
|
||||
|
||||
@property
|
||||
def native_value(self) -> int | None:
|
||||
"""Return sensor state."""
|
||||
job: OctoprintJobInfo = self.coordinator.data["job"]
|
||||
|
||||
return job.job.file.size or None
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
if not self.coordinator.last_update_success:
|
||||
return False
|
||||
job: OctoprintJobInfo = self.coordinator.data["job"]
|
||||
return bool(job and job.job.file.size)
|
||||
|
@ -6,7 +6,12 @@ from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import CONF_NPSSO
|
||||
from .coordinator import PlaystationNetworkConfigEntry, PlaystationNetworkCoordinator
|
||||
from .coordinator import (
|
||||
PlaystationNetworkConfigEntry,
|
||||
PlaystationNetworkRuntimeData,
|
||||
PlaystationNetworkTrophyTitlesCoordinator,
|
||||
PlaystationNetworkUserDataCoordinator,
|
||||
)
|
||||
from .helpers import PlaystationNetwork
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
@ -23,9 +28,12 @@ async def async_setup_entry(
|
||||
|
||||
psn = PlaystationNetwork(hass, entry.data[CONF_NPSSO])
|
||||
|
||||
coordinator = PlaystationNetworkCoordinator(hass, psn, entry)
|
||||
coordinator = PlaystationNetworkUserDataCoordinator(hass, psn, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
trophy_titles = PlaystationNetworkTrophyTitlesCoordinator(hass, psn, entry)
|
||||
|
||||
entry.runtime_data = PlaystationNetworkRuntimeData(coordinator, trophy_titles)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
@ -49,7 +49,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the binary sensor platform."""
|
||||
coordinator = config_entry.runtime_data
|
||||
coordinator = config_entry.runtime_data.user_data
|
||||
async_add_entities(
|
||||
PlaystationNetworkBinarySensorEntity(coordinator, description)
|
||||
for description in BINARY_SENSOR_DESCRIPTIONS
|
||||
|
@ -10,7 +10,6 @@ from psnawp_api.core.psnawp_exceptions import (
|
||||
PSNAWPInvalidTokenError,
|
||||
PSNAWPNotFoundError,
|
||||
)
|
||||
from psnawp_api.models.user import User
|
||||
from psnawp_api.utils.misc import parse_npsso_token
|
||||
import voluptuous as vol
|
||||
|
||||
@ -42,7 +41,7 @@ class PlaystationNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
psn = PlaystationNetwork(self.hass, npsso)
|
||||
try:
|
||||
user: User = await psn.get_user()
|
||||
user = await psn.get_user()
|
||||
except PSNAWPAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except PSNAWPNotFoundError:
|
||||
@ -98,7 +97,7 @@ class PlaystationNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
npsso = parse_npsso_token(user_input[CONF_NPSSO])
|
||||
psn = PlaystationNetwork(self.hass, npsso)
|
||||
user: User = await psn.get_user()
|
||||
user = await psn.get_user()
|
||||
except PSNAWPAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except (PSNAWPNotFoundError, PSNAWPInvalidTokenError):
|
||||
|
@ -8,9 +8,10 @@ DOMAIN = "playstation_network"
|
||||
CONF_NPSSO: Final = "npsso"
|
||||
|
||||
SUPPORTED_PLATFORMS = {
|
||||
PlatformType.PS5,
|
||||
PlatformType.PS4,
|
||||
PlatformType.PS_VITA,
|
||||
PlatformType.PS3,
|
||||
PlatformType.PS4,
|
||||
PlatformType.PS5,
|
||||
PlatformType.PSPC,
|
||||
}
|
||||
|
||||
|
@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
@ -10,6 +12,7 @@ from psnawp_api.core.psnawp_exceptions import (
|
||||
PSNAWPClientError,
|
||||
PSNAWPServerError,
|
||||
)
|
||||
from psnawp_api.models.trophies import TrophyTitle
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -21,13 +24,22 @@ from .helpers import PlaystationNetwork, PlaystationNetworkData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type PlaystationNetworkConfigEntry = ConfigEntry[PlaystationNetworkCoordinator]
|
||||
type PlaystationNetworkConfigEntry = ConfigEntry[PlaystationNetworkRuntimeData]
|
||||
|
||||
|
||||
class PlaystationNetworkCoordinator(DataUpdateCoordinator[PlaystationNetworkData]):
|
||||
"""Data update coordinator for PSN."""
|
||||
@dataclass
|
||||
class PlaystationNetworkRuntimeData:
|
||||
"""Dataclass holding PSN runtime data."""
|
||||
|
||||
user_data: PlaystationNetworkUserDataCoordinator
|
||||
trophy_titles: PlaystationNetworkTrophyTitlesCoordinator
|
||||
|
||||
|
||||
class PlayStationNetworkBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
|
||||
"""Base coordinator for PSN."""
|
||||
|
||||
config_entry: PlaystationNetworkConfigEntry
|
||||
_update_interval: timedelta
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -41,16 +53,43 @@ class PlaystationNetworkCoordinator(DataUpdateCoordinator[PlaystationNetworkData
|
||||
name=DOMAIN,
|
||||
logger=_LOGGER,
|
||||
config_entry=config_entry,
|
||||
update_interval=timedelta(seconds=30),
|
||||
update_interval=self._update_interval,
|
||||
)
|
||||
|
||||
self.psn = psn
|
||||
|
||||
@abstractmethod
|
||||
async def update_data(self) -> _DataT:
|
||||
"""Update coordinator data."""
|
||||
|
||||
async def _async_update_data(self) -> _DataT:
|
||||
"""Get the latest data from the PSN."""
|
||||
try:
|
||||
return await self.update_data()
|
||||
except PSNAWPAuthenticationError as error:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready",
|
||||
) from error
|
||||
except (PSNAWPServerError, PSNAWPClientError) as error:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from error
|
||||
|
||||
|
||||
class PlaystationNetworkUserDataCoordinator(
|
||||
PlayStationNetworkBaseCoordinator[PlaystationNetworkData]
|
||||
):
|
||||
"""Data update coordinator for PSN."""
|
||||
|
||||
_update_interval = timedelta(seconds=30)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
|
||||
try:
|
||||
await self.psn.get_user()
|
||||
await self.psn.async_setup()
|
||||
except PSNAWPAuthenticationError as error:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
@ -62,17 +101,22 @@ class PlaystationNetworkCoordinator(DataUpdateCoordinator[PlaystationNetworkData
|
||||
translation_key="update_failed",
|
||||
) from error
|
||||
|
||||
async def _async_update_data(self) -> PlaystationNetworkData:
|
||||
async def update_data(self) -> PlaystationNetworkData:
|
||||
"""Get the latest data from the PSN."""
|
||||
try:
|
||||
return await self.psn.get_data()
|
||||
except PSNAWPAuthenticationError as error:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready",
|
||||
) from error
|
||||
except (PSNAWPServerError, PSNAWPClientError) as error:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from error
|
||||
return await self.psn.get_data()
|
||||
|
||||
|
||||
class PlaystationNetworkTrophyTitlesCoordinator(
|
||||
PlayStationNetworkBaseCoordinator[list[TrophyTitle]]
|
||||
):
|
||||
"""Trophy titles data update coordinator for PSN."""
|
||||
|
||||
_update_interval = timedelta(days=1)
|
||||
|
||||
async def update_data(self) -> list[TrophyTitle]:
|
||||
"""Update trophy titles data."""
|
||||
self.psn.trophy_titles = await self.hass.async_add_executor_job(
|
||||
lambda: list(self.psn.user.trophy_titles())
|
||||
)
|
||||
await self.config_entry.runtime_data.user_data.async_request_refresh()
|
||||
return self.psn.trophy_titles
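The two coordinators above share the base-class pattern introduced in this change: each subclass only sets its polling interval and implements update_data, while error mapping lives in the base. A minimal standalone restatement (exception names here are placeholders, not psnawp or Home Assistant classes):

from abc import ABC, abstractmethod
import asyncio
from datetime import timedelta

class AuthError(Exception): ...
class TransientError(Exception): ...
class ReauthRequired(Exception): ...
class RefreshFailed(Exception): ...

class BaseCoordinator[_DataT](ABC):
    """Simplified stand-in for the shared coordinator base."""

    _update_interval: timedelta  # each subclass chooses its own cadence

    @abstractmethod
    async def update_data(self) -> _DataT:
        """Fetch fresh data; subclasses implement only this."""

    async def refresh(self) -> _DataT:
        """Shared error mapping around every refresh."""
        try:
            return await self.update_data()
        except AuthError as err:  # mapped to ConfigEntryAuthFailed in the integration
            raise ReauthRequired from err
        except TransientError as err:  # mapped to UpdateFailed in the integration
            raise RefreshFailed from err

class UserDataCoordinator(BaseCoordinator[dict]):
    _update_interval = timedelta(seconds=30)

    async def update_data(self) -> dict:
        return {"status": "online"}

print(asyncio.run(UserDataCoordinator().refresh()))  # {'status': 'online'}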
@ -10,7 +10,7 @@ from psnawp_api.models.trophies import PlatformType
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import PlaystationNetworkConfigEntry, PlaystationNetworkCoordinator
|
||||
from .coordinator import PlaystationNetworkConfigEntry
|
||||
|
||||
TO_REDACT = {
|
||||
"account_id",
|
||||
@ -27,12 +27,12 @@ async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: PlaystationNetworkConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator: PlaystationNetworkCoordinator = entry.runtime_data
|
||||
coordinator = entry.runtime_data.user_data
|
||||
|
||||
return {
|
||||
"data": async_redact_data(
|
||||
_serialize_platform_types(asdict(coordinator.data)), TO_REDACT
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@ -46,10 +46,12 @@ def _serialize_platform_types(data: Any) -> Any:
|
||||
for platform, record in data.items()
|
||||
}
|
||||
if isinstance(data, set):
|
||||
return [
|
||||
record.value if isinstance(record, PlatformType) else record
|
||||
for record in data
|
||||
]
|
||||
return sorted(
|
||||
[
|
||||
record.value if isinstance(record, PlatformType) else record
|
||||
for record in data
|
||||
]
|
||||
)
|
||||
if isinstance(data, PlatformType):
|
||||
return data.value
|
||||
return data
|
||||
|
@ -7,17 +7,19 @@ from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PlaystationNetworkCoordinator
|
||||
from .coordinator import PlaystationNetworkUserDataCoordinator
|
||||
|
||||
|
||||
class PlaystationNetworkServiceEntity(CoordinatorEntity[PlaystationNetworkCoordinator]):
|
||||
class PlaystationNetworkServiceEntity(
|
||||
CoordinatorEntity[PlaystationNetworkUserDataCoordinator]
|
||||
):
|
||||
"""Common entity class for PlayStationNetwork Service entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PlaystationNetworkCoordinator,
|
||||
coordinator: PlaystationNetworkUserDataCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize PlayStation Network Service Entity."""
|
||||
|
@ -8,7 +8,7 @@ from typing import Any

from psnawp_api import PSNAWP
from psnawp_api.models.client import Client
from psnawp_api.models.trophies import PlatformType, TrophySummary
from psnawp_api.models.trophies import PlatformType, TrophySummary, TrophyTitle
from psnawp_api.models.user import User
from pyrate_limiter import Duration, Rate

@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant

from .const import SUPPORTED_PLATFORMS

LEGACY_PLATFORMS = {PlatformType.PS3, PlatformType.PS4}
LEGACY_PLATFORMS = {PlatformType.PS3, PlatformType.PS4, PlatformType.PS_VITA}


@dataclass
@ -52,10 +52,22 @@ class PlaystationNetwork:
"""Initialize the class with the npsso token."""
rate = Rate(300, Duration.MINUTE * 15)
self.psn = PSNAWP(npsso, rate_limit=rate)
self.client: Client | None = None
self.client: Client
self.hass = hass
self.user: User
self.legacy_profile: dict[str, Any] | None = None
self.trophy_titles: list[TrophyTitle] = []
self._title_icon_urls: dict[str, str] = {}

def _setup(self) -> None:
"""Setup PSN."""
self.user = self.psn.user(online_id="me")
self.client = self.psn.me()
self.trophy_titles = list(self.user.trophy_titles())

async def async_setup(self) -> None:
"""Setup PSN."""
await self.hass.async_add_executor_job(self._setup)

async def get_user(self) -> User:
"""Get the user object from the PlayStation Network."""
@ -68,9 +80,6 @@ class PlaystationNetwork:
"""Bundle api calls to retrieve data from the PlayStation Network."""
data = PlaystationNetworkData()

if not self.client:
self.client = self.psn.me()

data.registered_platforms = {
PlatformType(device["deviceType"])
for device in self.client.get_account_devices()
@ -123,7 +132,7 @@ class PlaystationNetwork:
presence = self.legacy_profile["profile"].get("presences", [])
if (game_title_info := presence[0] if presence else {}) and game_title_info[
"onlineStatus"
] == "online":
] != "offline":
platform = PlatformType(game_title_info["platform"])

if platform is PlatformType.PS4:
@ -135,6 +144,10 @@ class PlaystationNetwork:
account_id="me",
np_communication_id="",
).get_title_icon_url()
elif platform is PlatformType.PS_VITA and game_title_info.get(
"npTitleId"
):
media_image_url = self.get_psvita_title_icon_url(game_title_info)
else:
media_image_url = None

@ -147,3 +160,28 @@ class PlaystationNetwork:
status=game_title_info["onlineStatus"],
)
return data

def get_psvita_title_icon_url(self, game_title_info: dict[str, Any]) -> str | None:
"""Look up title_icon_url from trophy titles data."""

if url := self._title_icon_urls.get(game_title_info["npTitleId"]):
return url

url = next(
(
title.title_icon_url
for title in self.trophy_titles
if game_title_info["titleName"]
== normalize_title(title.title_name or "")
and next(iter(title.title_platform)) == PlatformType.PS_VITA
),
None,
)
if url is not None:
self._title_icon_urls[game_title_info["npTitleId"]] = url
return url


def normalize_title(name: str) -> str:
"""Normalize trophy title."""
return name.removesuffix("Trophies").removesuffix("Trophy Set").strip()
@ -17,13 +17,18 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import PlaystationNetworkConfigEntry, PlaystationNetworkCoordinator
from . import (
PlaystationNetworkConfigEntry,
PlaystationNetworkTrophyTitlesCoordinator,
PlaystationNetworkUserDataCoordinator,
)
from .const import DOMAIN, SUPPORTED_PLATFORMS

_LOGGER = logging.getLogger(__name__)


PLATFORM_MAP = {
PlatformType.PS_VITA: "PlayStation Vita",
PlatformType.PS5: "PlayStation 5",
PlatformType.PS4: "PlayStation 4",
PlatformType.PS3: "PlayStation 3",
@ -38,7 +43,8 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Media Player Entity Setup."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.user_data
trophy_titles = config_entry.runtime_data.trophy_titles
devices_added: set[PlatformType] = set()
device_reg = dr.async_get(hass)
entities = []
@ -50,10 +56,12 @@ async def async_setup_entry(
if not SUPPORTED_PLATFORMS - devices_added:
remove_listener()

new_platforms = set(coordinator.data.active_sessions.keys()) - devices_added
new_platforms = (
set(coordinator.data.active_sessions.keys()) & SUPPORTED_PLATFORMS
) - devices_added
if new_platforms:
async_add_entities(
PsnMediaPlayerEntity(coordinator, platform_type)
PsnMediaPlayerEntity(coordinator, platform_type, trophy_titles)
for platform_type in new_platforms
)
devices_added |= new_platforms
@ -64,7 +72,7 @@ async def async_setup_entry(
(DOMAIN, f"{coordinator.config_entry.unique_id}_{platform.value}")
}
):
entities.append(PsnMediaPlayerEntity(coordinator, platform))
entities.append(PsnMediaPlayerEntity(coordinator, platform, trophy_titles))
devices_added.add(platform)
if entities:
async_add_entities(entities)
@ -74,7 +82,7 @@ async def async_setup_entry(


class PsnMediaPlayerEntity(
CoordinatorEntity[PlaystationNetworkCoordinator], MediaPlayerEntity
CoordinatorEntity[PlaystationNetworkUserDataCoordinator], MediaPlayerEntity
):
"""Media player entity representing currently playing game."""

@ -86,7 +94,10 @@ class PsnMediaPlayerEntity(
_attr_name = None

def __init__(
self, coordinator: PlaystationNetworkCoordinator, platform: PlatformType
self,
coordinator: PlaystationNetworkUserDataCoordinator,
platform: PlatformType,
trophy_titles: PlaystationNetworkTrophyTitlesCoordinator,
) -> None:
"""Initialize PSN MediaPlayer."""
super().__init__(coordinator)
@ -101,15 +112,21 @@ class PsnMediaPlayerEntity(
model=PLATFORM_MAP[platform],
via_device=(DOMAIN, coordinator.config_entry.unique_id),
)
self.trophy_titles = trophy_titles

@property
def state(self) -> MediaPlayerState:
"""Media Player state getter."""
session = self.coordinator.data.active_sessions.get(self.key)
if session and session.status == "online":
if session.title_id is not None:
return MediaPlayerState.PLAYING
return MediaPlayerState.ON
if session:
if session.status == "online":
return (
MediaPlayerState.PLAYING
if session.title_id is not None
else MediaPlayerState.ON
)
if session.status == "standby":
return MediaPlayerState.STANDBY
return MediaPlayerState.OFF

@property
@ -129,3 +146,12 @@ class PsnMediaPlayerEntity(
"""Media image url getter."""
session = self.coordinator.data.active_sessions.get(self.key)
return session.media_image_url if session else None

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""

await super().async_added_to_hass()
if self.key is PlatformType.PS_VITA:
self.async_on_remove(
self.trophy_titles.async_add_listener(self._handle_coordinator_update)
)
@ -131,7 +131,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.user_data
async_add_entities(
PlaystationNetworkSensorEntity(coordinator, description)
for description in SENSOR_DESCRIPTIONS
@ -12,6 +12,7 @@ from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.pool import (
ConnectionPoolEntry,
NullPool,
PoolProxiedConnection,
SingletonThreadPool,
StaticPool,
)
@ -119,6 +120,12 @@ class RecorderPool(SingletonThreadPool, NullPool):
)
return NullPool._create_connection(self)  # noqa: SLF001

def connect(self) -> PoolProxiedConnection:
"""Return a connection from the pool."""
if threading.get_ident() in self.recorder_and_worker_thread_ids:
return super().connect()
return NullPool.connect(self)


class MutexPool(StaticPool):
"""A pool which prevents concurrent accesses from multiple threads.
@ -868,8 +868,8 @@ RPC_SENSORS: Final = {
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
available=lambda status: (status and status["n_current"]) is not None,
removal_condition=lambda _config, status, _key: "n_current" not in status,
removal_condition=lambda _config, status, key: status[key].get("n_current")
is None,
entity_registry_enabled_default=False,
),
"total_current": RpcSensorDescription(
@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.2.7"]
"requirements": ["pysmartthings==3.2.8"]
}
@ -1,7 +1,7 @@
"""The Squeezebox integration."""

from asyncio import timeout
from dataclasses import dataclass
from dataclasses import dataclass, field
from datetime import datetime
from http import HTTPStatus
import logging
@ -37,8 +37,6 @@ from .const import (
DISCOVERY_INTERVAL,
DISCOVERY_TASK,
DOMAIN,
KNOWN_PLAYERS,
KNOWN_SERVERS,
SERVER_MANUFACTURER,
SERVER_MODEL,
SERVER_MODEL_ID,
@ -73,6 +71,7 @@ class SqueezeboxData:

coordinator: LMSStatusDataUpdateCoordinator
server: Server
known_player_ids: set[str] = field(default_factory=set)


type SqueezeboxConfigEntry = ConfigEntry[SqueezeboxData]
@ -187,16 +186,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -

entry.runtime_data = SqueezeboxData(coordinator=server_coordinator, server=lms)

# set up player discovery
known_servers = hass.data.setdefault(DOMAIN, {}).setdefault(KNOWN_SERVERS, {})
known_players = known_servers.setdefault(lms.uuid, {}).setdefault(KNOWN_PLAYERS, [])

async def _player_discovery(now: datetime | None = None) -> None:
"""Discover squeezebox players by polling server."""

async def _discovered_player(player: Player) -> None:
"""Handle a (re)discovered player."""
if player.player_id in known_players:
if player.player_id in entry.runtime_data.known_player_ids:
await player.async_update()
async_dispatcher_send(
hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected
@ -207,7 +202,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
hass, entry, player, lms.uuid
)
await player_coordinator.async_refresh()
known_players.append(player.player_id)
entry.runtime_data.known_player_ids.add(player.player_id)
async_dispatcher_send(
hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator
)
@ -4,8 +4,6 @@ CONF_HTTPS = "https"
DISCOVERY_TASK = "discovery_task"
DOMAIN = "squeezebox"
DEFAULT_PORT = 9000
KNOWN_PLAYERS = "known_players"
KNOWN_SERVERS = "known_servers"
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"
SENSOR_UPDATE_INTERVAL = 60
SERVER_MANUFACTURER = "https://lyrion.org/"
@ -60,8 +60,6 @@ from .const import (
DEFAULT_VOLUME_STEP,
DISCOVERY_TASK,
DOMAIN,
KNOWN_PLAYERS,
KNOWN_SERVERS,
SERVER_MANUFACTURER,
SERVER_MODEL,
SERVER_MODEL_ID,
@ -316,9 +314,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):

async def async_will_remove_from_hass(self) -> None:
"""Remove from list of known players when removed from hass."""
known_servers = self.hass.data[DOMAIN][KNOWN_SERVERS]
known_players = known_servers[self.coordinator.server_uuid][KNOWN_PLAYERS]
known_players.remove(self.coordinator.player.player_id)
self.coordinator.config_entry.runtime_data.known_player_ids.remove(
self.coordinator.player.player_id
)

@property
def volume_level(self) -> float | None:
@ -8,13 +8,27 @@ from stookwijzer import Stookwijzer

from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er, issue_registry as ir
from homeassistant.helpers import (
config_validation as cv,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, LOGGER
from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator
from .services import setup_services

PLATFORMS = [Platform.SENSOR]

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Stookwijzer component."""
setup_services(hass)
return True


async def async_setup_entry(hass: HomeAssistant, entry: StookwijzerConfigEntry) -> bool:
"""Set up Stookwijzer from a config entry."""

@ -5,3 +5,6 @@ from typing import Final

DOMAIN: Final = "stookwijzer"
LOGGER = logging.getLogger(__package__)

ATTR_CONFIG_ENTRY_ID = "config_entry_id"
SERVICE_GET_FORECAST = "get_forecast"
homeassistant/components/stookwijzer/icons.json (new file, 7 lines)
@ -0,0 +1,7 @@
{
"services": {
"get_forecast": {
"service": "mdi:clock-plus-outline"
}
}
}
homeassistant/components/stookwijzer/services.py (new file, 76 lines)
@ -0,0 +1,76 @@
"""Define services for the Stookwijzer integration."""

from typing import Required, TypedDict, cast

import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import (
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
)
from homeassistant.exceptions import ServiceValidationError

from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_GET_FORECAST
from .coordinator import StookwijzerConfigEntry

SERVICE_GET_FORECAST_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): str,
}
)


class Forecast(TypedDict):
"""Typed Stookwijzer forecast dict."""

datetime: Required[str]
advice: str | None
final: bool | None

def async_get_entry(
hass: HomeAssistant, config_entry_id: str
) -> StookwijzerConfigEntry:
"""Get the Overseerr config entry."""
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="integration_not_found",
translation_placeholders={"target": DOMAIN},
)
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_loaded",
translation_placeholders={"target": entry.title},
)
return cast(StookwijzerConfigEntry, entry)


def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Stookwijzer integration."""

async def async_get_forecast(call: ServiceCall) -> ServiceResponse | None:
"""Get the forecast from API endpoint."""
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
client = entry.runtime_data.client

return cast(
ServiceResponse,
{
"forecast": cast(
list[Forecast], await client.async_get_forecast() or []
),
},
)

hass.services.async_register(
DOMAIN,
SERVICE_GET_FORECAST,
async_get_forecast,
schema=SERVICE_GET_FORECAST_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
homeassistant/components/stookwijzer/services.yaml (new file, 7 lines)
@ -0,0 +1,7 @@
get_forecast:
fields:
config_entry_id:
required: true
selector:
config_entry:
integration: stookwijzer
@ -27,6 +27,18 @@
}
}
},
"services": {
"get_forecast": {
"name": "Get forecast",
"description": "Retrieves the advice forecast from Stookwijzer.",
"fields": {
"config_entry_id": {
"name": "Stookwijzer instance",
"description": "The Stookwijzer instance to get the forecast from."
}
}
}
},
"issues": {
"location_migration_failed": {
"description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integration uses.\n\nMake sure you are connected to the Internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.",
@ -36,6 +48,12 @@
"exceptions": {
"no_data_received": {
"message": "No data received from Stookwijzer."
},
"not_loaded": {
"message": "{target} is not loaded."
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."
}
}
}
@ -29,6 +29,7 @@ PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.CLIMATE,
Platform.FAN,
Platform.LOCK,
Platform.SENSOR,
Platform.SWITCH,
@ -51,6 +52,7 @@ class SwitchbotDevices:
sensors: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
vacuums: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
locks: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)
fans: list[tuple[Device, SwitchBotCoordinator]] = field(default_factory=list)


@dataclass
@ -96,7 +98,6 @@ async def make_switchbot_devices(
for device in devices
]
)

return devices_data


@ -177,6 +178,16 @@ async def make_device_data(
else:
devices_data.switches.append((device, coordinator))

if isinstance(device, Device) and device.device_type in [
"Battery Circulator Fan",
"Circulator Fan",
]:
coordinator = await coordinator_for_device(
hass, entry, api, device, coordinators_by_id
)
devices_data.fans.append((device, coordinator))
devices_data.sensors.append((device, coordinator))


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up SwitchBot via API from a config entry."""
homeassistant/components/switchbot_cloud/fan.py (new file, 120 lines)
@ -0,0 +1,120 @@
"""Support for the Switchbot Battery Circulator fan."""

import asyncio
from typing import Any

from switchbot_api import (
BatteryCirculatorFanCommands,
BatteryCirculatorFanMode,
CommonCommands,
)

from homeassistant.components.fan import FanEntity, FanEntityFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import SwitchbotCloudData
from .const import DOMAIN
from .entity import SwitchBotCloudEntity


async def async_setup_entry(
hass: HomeAssistant,
config: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up SwitchBot Cloud entry."""
data: SwitchbotCloudData = hass.data[DOMAIN][config.entry_id]
async_add_entities(
SwitchBotCloudFan(data.api, device, coordinator)
for device, coordinator in data.devices.fans
)


class SwitchBotCloudFan(SwitchBotCloudEntity, FanEntity):
"""Representation of a SwitchBot Battery Circulator Fan."""

_attr_name = None

_attr_supported_features = (
FanEntityFeature.SET_SPEED
| FanEntityFeature.PRESET_MODE
| FanEntityFeature.TURN_OFF
| FanEntityFeature.TURN_ON
)
_attr_preset_modes = list(BatteryCirculatorFanMode)

_attr_is_on: bool | None = None

@property
def is_on(self) -> bool | None:
"""Return true if the entity is on."""
return self._attr_is_on

def _set_attributes(self) -> None:
"""Set attributes from coordinator data."""
if self.coordinator.data is None:
return

power: str = self.coordinator.data["power"]
mode: str = self.coordinator.data["mode"]
fan_speed: str = self.coordinator.data["fanSpeed"]
self._attr_is_on = power == "on"
self._attr_preset_mode = mode
self._attr_percentage = int(fan_speed)
self._attr_supported_features = (
FanEntityFeature.PRESET_MODE
| FanEntityFeature.TURN_OFF
| FanEntityFeature.TURN_ON
)
if self.is_on and self.preset_mode == BatteryCirculatorFanMode.DIRECT.value:
self._attr_supported_features |= FanEntityFeature.SET_SPEED

async def async_turn_on(
self,
percentage: int | None = None,
preset_mode: str | None = None,
**kwargs: Any,
) -> None:
"""Turn on the fan."""
await self.send_api_command(CommonCommands.ON)
await self.send_api_command(
command=BatteryCirculatorFanCommands.SET_WIND_MODE,
parameters=str(self.preset_mode),
)
if self.preset_mode == BatteryCirculatorFanMode.DIRECT.value:
await self.send_api_command(
command=BatteryCirculatorFanCommands.SET_WIND_SPEED,
parameters=str(self.percentage),
)
await asyncio.sleep(5)
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the fan."""
await self.send_api_command(CommonCommands.OFF)
await asyncio.sleep(5)
await self.coordinator.async_request_refresh()

async def async_set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
await self.send_api_command(
command=BatteryCirculatorFanCommands.SET_WIND_MODE,
parameters=str(BatteryCirculatorFanMode.DIRECT.value),
)
await self.send_api_command(
command=BatteryCirculatorFanCommands.SET_WIND_SPEED,
parameters=str(percentage),
)
await asyncio.sleep(5)
await self.coordinator.async_request_refresh()

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self.send_api_command(
command=BatteryCirculatorFanCommands.SET_WIND_MODE,
parameters=preset_mode,
)
await asyncio.sleep(5)
await self.coordinator.async_request_refresh()
@ -91,6 +91,7 @@ CO2_DESCRIPTION = SensorEntityDescription(

SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
"Bot": (BATTERY_DESCRIPTION,),
"Battery Circulator Fan": (BATTERY_DESCRIPTION,),
"Meter": (
TEMPERATURE_DESCRIPTION,
HUMIDITY_DESCRIPTION,
@ -32,8 +32,6 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, selector, template
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -42,15 +40,11 @@ from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.script import Script
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_OBJECT_ID, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -88,7 +82,7 @@ class TemplateCodeFormat(Enum):
|
||||
text = CodeFormat.TEXT
|
||||
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
|
||||
@ -161,54 +155,6 @@ ALARM_CONTROL_PANEL_CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy alarm control panel configuration definitions to modern ones."""
|
||||
alarm_control_panels = []
|
||||
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
alarm_control_panels.append(entity_conf)
|
||||
|
||||
return alarm_control_panels
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template alarm control panels."""
|
||||
alarm_control_panels = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
alarm_control_panels.append(
|
||||
AlarmControlPanelTemplate(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(alarm_control_panels)
|
||||
|
||||
|
||||
def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]:
|
||||
"""Rewrite option configuration to modern configuration."""
|
||||
option_config = {**option_config}
|
||||
@ -231,7 +177,7 @@ async def async_setup_entry(
|
||||
validated_config = ALARM_CONTROL_PANEL_CONFIG_SCHEMA(_options)
|
||||
async_add_entities(
|
||||
[
|
||||
AlarmControlPanelTemplate(
|
||||
StateAlarmControlPanelEntity(
|
||||
hass,
|
||||
validated_config,
|
||||
config_entry.entry_id,
|
||||
@ -247,27 +193,16 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Template cover."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_ALARM_CONTROL_PANELS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerAlarmControlPanelEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
ALARM_CONTROL_PANEL_DOMAIN,
|
||||
config,
|
||||
StateAlarmControlPanelEntity,
|
||||
TriggerAlarmControlPanelEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_ALARM_CONTROL_PANELS,
|
||||
)
|
||||
|
||||
|
||||
@ -276,6 +211,8 @@ class AbstractTemplateAlarmControlPanel(
|
||||
):
|
||||
"""Representation of a templated Alarm Control Panel features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
@ -414,7 +351,7 @@ class AbstractTemplateAlarmControlPanel(
|
||||
)
|
||||
|
||||
|
||||
class AlarmControlPanelTemplate(TemplateEntity, AbstractTemplateAlarmControlPanel):
|
||||
class StateAlarmControlPanelEntity(TemplateEntity, AbstractTemplateAlarmControlPanel):
|
||||
"""Representation of a templated Alarm Control Panel."""
|
||||
|
||||
_attr_should_poll = False
|
||||
@ -426,12 +363,8 @@ class AlarmControlPanelTemplate(TemplateEntity, AbstractTemplateAlarmControlPane
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the panel."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateAlarmControlPanel.__init__(self, config)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
@ -442,11 +375,6 @@ class AlarmControlPanelTemplate(TemplateEntity, AbstractTemplateAlarmControlPane
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@ -497,11 +425,6 @@ class TriggerAlarmControlPanelEntity(TriggerEntity, AbstractTemplateAlarmControl
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
|
@ -24,9 +24,7 @@ from homeassistant.const import (
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_DEVICE_ID,
|
||||
CONF_ENTITY_PICTURE_TEMPLATE,
|
||||
CONF_FRIENDLY_NAME,
|
||||
CONF_FRIENDLY_NAME_TEMPLATE,
|
||||
CONF_ICON,
|
||||
CONF_ICON_TEMPLATE,
|
||||
CONF_NAME,
|
||||
CONF_SENSORS,
|
||||
@ -41,8 +39,6 @@ from homeassistant.const import (
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, selector, template
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -53,18 +49,9 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import (
|
||||
CONF_ATTRIBUTES,
|
||||
CONF_AVAILABILITY,
|
||||
CONF_AVAILABILITY_TEMPLATE,
|
||||
CONF_OBJECT_ID,
|
||||
CONF_PICTURE,
|
||||
)
|
||||
from .template_entity import (
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA,
|
||||
TemplateEntity,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .const import CONF_AVAILABILITY_TEMPLATE
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TEMPLATE_ENTITY_COMMON_SCHEMA, TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
CONF_DELAY_ON = "delay_on"
|
||||
@ -73,12 +60,7 @@ CONF_AUTO_OFF = "auto_off"
|
||||
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_ICON_TEMPLATE: CONF_ICON,
|
||||
CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
|
||||
CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
|
||||
CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
|
||||
CONF_FRIENDLY_NAME_TEMPLATE: CONF_NAME,
|
||||
CONF_FRIENDLY_NAME: CONF_NAME,
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
|
||||
@ -121,27 +103,6 @@ LEGACY_BINARY_SENSOR_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, cfg: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy binary sensor definitions to modern ones."""
|
||||
sensors = []
|
||||
|
||||
for object_id, entity_cfg in cfg.items():
|
||||
entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_cfg = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_cfg, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_cfg:
|
||||
entity_cfg[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
sensors.append(entity_cfg)
|
||||
|
||||
return sensors
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(
|
||||
@ -151,33 +112,6 @@ PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback | AddConfigEntryEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template binary sensors."""
|
||||
sensors = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
sensors.append(
|
||||
BinarySensorTemplate(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(sensors)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -185,27 +119,16 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template binary sensors."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerBinarySensorEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
BINARY_SENSOR_DOMAIN,
|
||||
config,
|
||||
StateBinarySensorEntity,
|
||||
TriggerBinarySensorEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_SENSORS,
|
||||
)
|
||||
|
||||
|
||||
@ -219,23 +142,24 @@ async def async_setup_entry(
|
||||
_options.pop("template_type")
|
||||
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(_options)
|
||||
async_add_entities(
|
||||
[BinarySensorTemplate(hass, validated_config, config_entry.entry_id)]
|
||||
[StateBinarySensorEntity(hass, validated_config, config_entry.entry_id)]
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create_preview_binary_sensor(
|
||||
hass: HomeAssistant, name: str, config: dict[str, Any]
|
||||
) -> BinarySensorTemplate:
|
||||
) -> StateBinarySensorEntity:
|
||||
"""Create a preview sensor."""
|
||||
validated_config = BINARY_SENSOR_CONFIG_SCHEMA(config | {CONF_NAME: name})
|
||||
return BinarySensorTemplate(hass, validated_config, None)
|
||||
return StateBinarySensorEntity(hass, validated_config, None)
|
||||
|
||||
|
||||
class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||
class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||
"""A virtual binary sensor that triggers from another sensor."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -244,11 +168,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the Template binary sensor."""
|
||||
super().__init__(hass, config=config, unique_id=unique_id)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
self._template = config[CONF_STATE]
|
||||
@ -257,10 +177,6 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||
self._delay_on_raw = config.get(CONF_DELAY_ON)
|
||||
self._delay_off = None
|
||||
self._delay_off_raw = config.get(CONF_DELAY_OFF)
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore state."""
|
||||
@ -333,6 +249,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity):
|
||||
class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity):
|
||||
"""Sensor entity based on trigger data."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
domain = BINARY_SENSOR_DOMAIN
|
||||
extra_template_keys = (CONF_STATE,)
|
||||
|
||||
|
@ -3,20 +3,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.button import DEVICE_CLASSES_SCHEMA, ButtonEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_DEVICE_ID,
|
||||
CONF_NAME,
|
||||
CONF_UNIQUE_ID,
|
||||
from homeassistant.components.button import (
|
||||
DEVICE_CLASSES_SCHEMA,
|
||||
DOMAIN as BUTTON_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
ButtonEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICE_ID, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
@ -26,6 +25,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_PRESS, DOMAIN
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -50,19 +50,6 @@ CONFIG_BUTTON_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def _async_create_entities(
|
||||
hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
|
||||
) -> list[TemplateButtonEntity]:
|
||||
"""Create the Template button."""
|
||||
entities = []
|
||||
for definition in definitions:
|
||||
unique_id = definition.get(CONF_UNIQUE_ID)
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
entities.append(TemplateButtonEntity(hass, definition, unique_id))
|
||||
return entities
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -70,15 +57,14 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template button."""
|
||||
if not discovery_info or "coordinator" in discovery_info:
|
||||
raise PlatformNotReady(
|
||||
"The template button platform doesn't support trigger entities"
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
await _async_create_entities(
|
||||
hass, discovery_info["entities"], discovery_info["unique_id"]
|
||||
)
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
BUTTON_DOMAIN,
|
||||
config,
|
||||
StateButtonEntity,
|
||||
None,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
)
|
||||
|
||||
|
||||
@ -92,14 +78,15 @@ async def async_setup_entry(
|
||||
_options.pop("template_type")
|
||||
validated_config = CONFIG_BUTTON_SCHEMA(_options)
|
||||
async_add_entities(
|
||||
[TemplateButtonEntity(hass, validated_config, config_entry.entry_id)]
|
||||
[StateButtonEntity(hass, validated_config, config_entry.entry_id)]
|
||||
)
|
||||
|
||||
|
||||
class TemplateButtonEntity(TemplateEntity, ButtonEntity):
|
||||
class StateButtonEntity(TemplateEntity, ButtonEntity):
|
||||
"""Representation of a template button."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -108,8 +95,11 @@ class TemplateButtonEntity(TemplateEntity, ButtonEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the button."""
|
||||
super().__init__(hass, config=config, unique_id=unique_id)
|
||||
assert self._attr_name is not None
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._attr_name is not None
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (action := config.get(CONF_PRESS)) is not None:
|
||||
self.add_script(CONF_PRESS, action, self._attr_name, DOMAIN)
|
||||
|
@ -65,7 +65,7 @@ from . import (
|
||||
weather as weather_platform,
|
||||
)
|
||||
from .const import DOMAIN, PLATFORMS, TemplateConfig
|
||||
from .helpers import async_get_blueprints
|
||||
from .helpers import async_get_blueprints, rewrite_legacy_to_modern_configs
|
||||
|
||||
PACKAGE_MERGE_HINT = "list"
|
||||
|
||||
@ -249,16 +249,16 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf
|
||||
|
||||
legacy_warn_printed = False
|
||||
|
||||
for old_key, new_key, transform in (
|
||||
for old_key, new_key, legacy_fields in (
|
||||
(
|
||||
CONF_SENSORS,
|
||||
DOMAIN_SENSOR,
|
||||
sensor_platform.rewrite_legacy_to_modern_conf,
|
||||
sensor_platform.LEGACY_FIELDS,
|
||||
),
|
||||
(
|
||||
CONF_BINARY_SENSORS,
|
||||
DOMAIN_BINARY_SENSOR,
|
||||
binary_sensor_platform.rewrite_legacy_to_modern_conf,
|
||||
binary_sensor_platform.LEGACY_FIELDS,
|
||||
),
|
||||
):
|
||||
if old_key not in template_config:
|
||||
@ -276,7 +276,11 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf
|
||||
definitions = (
|
||||
list(template_config[new_key]) if new_key in template_config else []
|
||||
)
|
||||
definitions.extend(transform(hass, template_config[old_key]))
|
||||
definitions.extend(
|
||||
rewrite_legacy_to_modern_configs(
|
||||
hass, template_config[old_key], legacy_fields
|
||||
)
|
||||
)
|
||||
template_config = TemplateConfig({**template_config, new_key: definitions})
|
||||
|
||||
config_sections.append(template_config)
|
||||
|
@ -32,19 +32,17 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_OBJECT_ID, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -85,7 +83,7 @@ TILT_FEATURES = (
|
||||
| CoverEntityFeature.SET_TILT_POSITION
|
||||
)
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
CONF_POSITION_TEMPLATE: CONF_POSITION,
|
||||
CONF_TILT_TEMPLATE: CONF_TILT,
|
||||
@ -140,54 +138,6 @@ PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy switch configuration definitions to modern ones."""
|
||||
covers = []
|
||||
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
covers.append(entity_conf)
|
||||
|
||||
return covers
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template switches."""
|
||||
covers = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
covers.append(
|
||||
CoverTemplate(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(covers)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -195,33 +145,24 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Template cover."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_COVERS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerCoverEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
COVER_DOMAIN,
|
||||
config,
|
||||
StateCoverEntity,
|
||||
TriggerCoverEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_COVERS,
|
||||
)
|
||||
|
||||
|
||||
class AbstractTemplateCover(AbstractTemplateEntity, CoverEntity):
|
||||
"""Representation of a template cover features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
@ -445,7 +386,7 @@ class AbstractTemplateCover(AbstractTemplateEntity, CoverEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class CoverTemplate(TemplateEntity, AbstractTemplateCover):
|
||||
class StateCoverEntity(TemplateEntity, AbstractTemplateCover):
|
||||
"""Representation of a Template cover."""
|
||||
|
||||
_attr_should_poll = False
|
||||
@ -457,12 +398,8 @@ class CoverTemplate(TemplateEntity, AbstractTemplateCover):
|
||||
unique_id,
|
||||
) -> None:
|
||||
"""Initialize the Template cover."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateCover.__init__(self, config)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
@ -3,21 +3,39 @@
|
||||
from collections.abc import Sequence
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import CONF_DEVICE_ID
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity import Entity, async_generate_entity_id
|
||||
from homeassistant.helpers.script import Script, _VarsType
|
||||
from homeassistant.helpers.template import TemplateStateFromEntityId
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_OBJECT_ID
|
||||
|
||||
|
||||
class AbstractTemplateEntity(Entity):
|
||||
"""Actions linked to a template entity."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
_entity_id_format: str
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
self.hass = hass
|
||||
self._action_scripts: dict[str, Script] = {}
|
||||
|
||||
if self.hass:
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
self._entity_id_format, object_id, hass=self.hass
|
||||
)
|
||||
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
self.hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
@property
|
||||
def referenced_blueprint(self) -> str | None:
|
||||
"""Return referenced blueprint or None."""
|
||||
|
@ -34,19 +34,17 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_OBJECT_ID, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -73,7 +71,7 @@ CONF_OSCILLATING = "oscillating"
|
||||
CONF_PERCENTAGE = "percentage"
|
||||
CONF_PRESET_MODE = "preset_mode"
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_DIRECTION_TEMPLATE: CONF_DIRECTION,
|
||||
CONF_OSCILLATING_TEMPLATE: CONF_OSCILLATING,
|
||||
CONF_PERCENTAGE_TEMPLATE: CONF_PERCENTAGE,
|
||||
@ -132,54 +130,6 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy fan configuration definitions to modern ones."""
|
||||
fans = []
|
||||
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
fans.append(entity_conf)
|
||||
|
||||
return fans
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template fans."""
|
||||
fans = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
fans.append(
|
||||
TemplateFan(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(fans)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -187,33 +137,24 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template fans."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_FANS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerFanEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
FAN_DOMAIN,
|
||||
config,
|
||||
StateFanEntity,
|
||||
TriggerFanEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_FANS,
|
||||
)
|
||||
|
||||
|
||||
class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
    """Representation of a template fan features."""

    _entity_id_format = ENTITY_ID_FORMAT

    # The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
    # This ensures that the __init__ on AbstractTemplateEntity is not called twice.
    def __init__(self, config: dict[str, Any]) -> None:  # pylint: disable=super-init-not-called
@@ -484,7 +425,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
        )


class TemplateFan(TemplateEntity, AbstractTemplateFan):
class StateFanEntity(TemplateEntity, AbstractTemplateFan):
    """A template fan component."""

    _attr_should_poll = False
@@ -496,12 +437,8 @@ class TemplateFan(TemplateEntity, AbstractTemplateFan):
        unique_id,
    ) -> None:
        """Initialize the fan."""
        TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
        TemplateEntity.__init__(self, hass, config, unique_id)
        AbstractTemplateFan.__init__(self, config)
        if (object_id := config.get(CONF_OBJECT_ID)) is not None:
            self.entity_id = async_generate_entity_id(
                ENTITY_ID_FORMAT, object_id, hass=hass
            )
        name = self._attr_name
        if TYPE_CHECKING:
            assert name is not None
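The comment repeated through these platform classes ("the super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__") describes an explicit, non-cooperative initialisation pattern rather than a super() chain. A minimal sketch of that pattern with invented class names (none of these exist in the codebase), assuming the shared mixin must never initialise the base twice:

class Base:
    def __init__(self) -> None:
        print("Base.__init__ runs once")


class StateParent(Base):
    def __init__(self) -> None:
        Base.__init__(self)  # owns base initialisation, like TemplateEntity / TriggerEntity
        self.state_machinery = True


class FeatureMixin(Base):
    # Deliberately skips Base.__init__; the state/trigger parent is expected to
    # have run it already, so the base is initialised exactly once.
    def __init__(self) -> None:
        self.feature = "shared fan behaviour"


class ConcreteFan(StateParent, FeatureMixin):
    def __init__(self) -> None:
        StateParent.__init__(self)
        FeatureMixin.__init__(self)


ConcreteFan()  # prints "Base.__init__ runs once" a single time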
@@ -1,19 +1,60 @@
"""Helpers for template integration."""

from collections.abc import Callable
import itertools
import logging
from typing import Any

from homeassistant.components import blueprint
from homeassistant.const import SERVICE_RELOAD
from homeassistant.const import (
    CONF_ENTITY_PICTURE_TEMPLATE,
    CONF_FRIENDLY_NAME,
    CONF_ICON,
    CONF_ICON_TEMPLATE,
    CONF_NAME,
    CONF_UNIQUE_ID,
    SERVICE_RELOAD,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import async_get_platforms
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import template
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import (
    AddEntitiesCallback,
    async_get_platforms,
)
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import DOMAIN
from .const import (
    CONF_ATTRIBUTE_TEMPLATES,
    CONF_ATTRIBUTES,
    CONF_AVAILABILITY,
    CONF_AVAILABILITY_TEMPLATE,
    CONF_OBJECT_ID,
    CONF_PICTURE,
    DOMAIN,
)
from .entity import AbstractTemplateEntity
from .template_entity import TemplateEntity
from .trigger_entity import TriggerEntity

DATA_BLUEPRINTS = "template_blueprints"

LOGGER = logging.getLogger(__name__)
LEGACY_FIELDS = {
    CONF_ICON_TEMPLATE: CONF_ICON,
    CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
    CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
    CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
    CONF_FRIENDLY_NAME: CONF_NAME,
}

_LOGGER = logging.getLogger(__name__)

type CreateTemplateEntitiesCallback = Callable[
    [type[TemplateEntity], AddEntitiesCallback, HomeAssistant, list[dict], str | None],
    None,
]


@callback
@@ -59,8 +100,131 @@ def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
    return blueprint.DomainBlueprints(
        hass,
        DOMAIN,
        LOGGER,
        _LOGGER,
        _blueprint_in_use,
        _reload_blueprint_templates,
        TEMPLATE_BLUEPRINT_SCHEMA,
    )


def rewrite_legacy_to_modern_config(
    hass: HomeAssistant,
    entity_cfg: dict[str, Any],
    extra_legacy_fields: dict[str, str],
) -> dict[str, Any]:
    """Rewrite legacy config."""
    entity_cfg = {**entity_cfg}

    for from_key, to_key in itertools.chain(
        LEGACY_FIELDS.items(), extra_legacy_fields.items()
    ):
        if from_key not in entity_cfg or to_key in entity_cfg:
            continue

        val = entity_cfg.pop(from_key)
        if isinstance(val, str):
            val = template.Template(val, hass)
        entity_cfg[to_key] = val

    if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str):
        entity_cfg[CONF_NAME] = template.Template(entity_cfg[CONF_NAME], hass)

    return entity_cfg


def rewrite_legacy_to_modern_configs(
    hass: HomeAssistant,
    entity_cfg: dict[str, dict],
    extra_legacy_fields: dict[str, str],
) -> list[dict]:
    """Rewrite legacy configuration definitions to modern ones."""
    entities = []
    for object_id, entity_conf in entity_cfg.items():
        entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}

        entity_conf = rewrite_legacy_to_modern_config(
            hass, entity_conf, extra_legacy_fields
        )

        if CONF_NAME not in entity_conf:
            entity_conf[CONF_NAME] = template.Template(object_id, hass)

        entities.append(entity_conf)

    return entities


@callback
def async_create_template_tracking_entities(
    entity_cls: type[Entity],
    async_add_entities: AddEntitiesCallback,
    hass: HomeAssistant,
    definitions: list[dict],
    unique_id_prefix: str | None,
) -> None:
    """Create the template tracking entities."""
    entities: list[Entity] = []
    for definition in definitions:
        unique_id = definition.get(CONF_UNIQUE_ID)
        if unique_id and unique_id_prefix:
            unique_id = f"{unique_id_prefix}-{unique_id}"
        entities.append(entity_cls(hass, definition, unique_id))  # type: ignore[call-arg]
    async_add_entities(entities)


async def async_setup_template_platform(
    hass: HomeAssistant,
    domain: str,
    config: ConfigType,
    state_entity_cls: type[TemplateEntity],
    trigger_entity_cls: type[TriggerEntity] | None,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None,
    legacy_fields: dict[str, str] | None = None,
    legacy_key: str | None = None,
) -> None:
    """Set up the Template platform."""
    if discovery_info is None:
        # Legacy Configuration
        if legacy_fields is not None:
            if legacy_key:
                configs = rewrite_legacy_to_modern_configs(
                    hass, config[legacy_key], legacy_fields
                )
            else:
                configs = [rewrite_legacy_to_modern_config(hass, config, legacy_fields)]
            async_create_template_tracking_entities(
                state_entity_cls,
                async_add_entities,
                hass,
                configs,
                None,
            )
        else:
            _LOGGER.warning(
                "Template %s entities can only be configured under template:", domain
            )
        return

    # Trigger Configuration
    if "coordinator" in discovery_info:
        if trigger_entity_cls:
            entities = [
                trigger_entity_cls(hass, discovery_info["coordinator"], config)
                for config in discovery_info["entities"]
            ]
            async_add_entities(entities)
        else:
            raise PlatformNotReady(
                f"The template {domain} platform doesn't support trigger entities"
            )
        return

    # Modern Configuration
    async_create_template_tracking_entities(
        state_entity_cls,
        async_add_entities,
        hass,
        discovery_info["entities"],
        discovery_info["unique_id"],
    )
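To make the legacy rewrite above concrete, here is a hedged sketch of its input and output for a single fan definition. The value_template -> state entry stands in for the platform-specific mapping each platform passes in, and the entity names are invented for illustration only:

legacy = {
    "ceiling_fan": {
        "friendly_name": "Ceiling fan",
        "value_template": "{{ states('switch.fan_relay') }}",
        "availability_template": "{{ is_state('switch.fan_relay', 'on') }}",
    }
}

# rewrite_legacy_to_modern_configs(hass, legacy, {"value_template": "state"}) would
# return roughly the following, with each string value wrapped in template.Template
# and the object_id carried along so the entity_id can still be derived from it:
modern = [
    {
        "object_id": "ceiling_fan",
        "name": "Ceiling fan",
        "state": "{{ states('switch.fan_relay') }}",
        "availability": "{{ is_state('switch.fan_relay', 'on') }}",
    }
]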
@ -7,15 +7,13 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN, ImageEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_NAME,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_URL,
|
||||
CONF_VERIFY_SSL,
|
||||
from homeassistant.components.image import (
|
||||
DOMAIN as IMAGE_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
ImageEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_DEVICE_ID, CONF_NAME, CONF_URL, CONF_VERIFY_SSL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
@ -29,6 +27,7 @@ from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_PICTURE
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_attributes_schema,
|
||||
@ -59,19 +58,6 @@ IMAGE_CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def _async_create_entities(
|
||||
hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
|
||||
) -> list[StateImageEntity]:
|
||||
"""Create the template image."""
|
||||
entities = []
|
||||
for definition in definitions:
|
||||
unique_id = definition.get(CONF_UNIQUE_ID)
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
entities.append(StateImageEntity(hass, definition, unique_id))
|
||||
return entities
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -79,23 +65,14 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template image."""
|
||||
if discovery_info is None:
|
||||
_LOGGER.warning(
|
||||
"Template image entities can only be configured under template:"
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerImageEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
async_add_entities(
|
||||
await _async_create_entities(
|
||||
hass, discovery_info["entities"], discovery_info["unique_id"]
|
||||
)
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
IMAGE_DOMAIN,
|
||||
config,
|
||||
StateImageEntity,
|
||||
TriggerImageEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
)
|
||||
|
||||
|
||||
@ -118,6 +95,7 @@ class StateImageEntity(TemplateEntity, ImageEntity):
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_image_url: str | None = None
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -126,7 +104,7 @@ class StateImageEntity(TemplateEntity, ImageEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the image."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
ImageEntity.__init__(self, hass, config[CONF_VERIFY_SSL])
|
||||
self._url_template = config[CONF_URL]
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
@ -162,6 +140,7 @@ class TriggerImageEntity(TriggerEntity, ImageEntity):
|
||||
"""Image entity based on trigger data."""
|
||||
|
||||
_attr_image_url: str | None = None
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
domain = IMAGE_DOMAIN
|
||||
extra_template_keys = (CONF_URL,)
|
||||
|
@ -43,20 +43,18 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_OBJECT_ID, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -103,7 +101,7 @@ CONF_WHITE_VALUE_TEMPLATE = "white_value_template"
|
||||
DEFAULT_MIN_MIREDS = 153
|
||||
DEFAULT_MAX_MIREDS = 500
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_COLOR_ACTION: CONF_HS_ACTION,
|
||||
CONF_COLOR_TEMPLATE: CONF_HS,
|
||||
CONF_EFFECT_LIST_TEMPLATE: CONF_EFFECT_LIST,
|
||||
@ -193,47 +191,6 @@ PLATFORM_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy switch configuration definitions to modern ones."""
|
||||
lights = []
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
lights.append(entity_conf)
|
||||
|
||||
return lights
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the Template Lights."""
|
||||
lights = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
lights.append(LightTemplate(hass, entity_conf, unique_id))
|
||||
|
||||
async_add_entities(lights)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -241,33 +198,24 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template lights."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_LIGHTS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerLightEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
LIGHT_DOMAIN,
|
||||
config,
|
||||
StateLightEntity,
|
||||
TriggerLightEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_LIGHTS,
|
||||
)
|
||||
|
||||
|
||||
class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
|
||||
"""Representation of a template lights features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__( # pylint: disable=super-init-not-called
|
||||
@ -934,7 +882,7 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
|
||||
|
||||
class LightTemplate(TemplateEntity, AbstractTemplateLight):
|
||||
class StateLightEntity(TemplateEntity, AbstractTemplateLight):
|
||||
"""Representation of a templated Light, including dimmable."""
|
||||
|
||||
_attr_should_poll = False
|
||||
@ -946,12 +894,8 @@ class LightTemplate(TemplateEntity, AbstractTemplateLight):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the light."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateLight.__init__(self, config)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
@ -9,6 +9,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.lock import (
|
||||
DOMAIN as LOCK_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA,
|
||||
LockEntity,
|
||||
LockEntityFeature,
|
||||
@ -31,12 +32,11 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from .const import CONF_PICTURE, DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -49,7 +49,7 @@ CONF_OPEN = "open"
|
||||
DEFAULT_NAME = "Template Lock"
|
||||
DEFAULT_OPTIMISTIC = False
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_CODE_FORMAT_TEMPLATE: CONF_CODE_FORMAT,
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
@ -83,33 +83,6 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend(
|
||||
).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template fans."""
|
||||
fans = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
fans.append(
|
||||
TemplateLock(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(fans)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -117,33 +90,23 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template fans."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
[rewrite_common_legacy_to_modern_conf(hass, config, LEGACY_FIELDS)],
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerLockEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
LOCK_DOMAIN,
|
||||
config,
|
||||
StateLockEntity,
|
||||
TriggerLockEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
)
|
||||
|
||||
|
||||
class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
"""Representation of a template lock features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
@ -311,7 +274,7 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
)
|
||||
|
||||
|
||||
class TemplateLock(TemplateEntity, AbstractTemplateLock):
|
||||
class StateLockEntity(TemplateEntity, AbstractTemplateLock):
|
||||
"""Representation of a template lock."""
|
||||
|
||||
_attr_should_poll = False
|
||||
@ -323,7 +286,7 @@ class TemplateLock(TemplateEntity, AbstractTemplateLock):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the lock."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateLock.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
|
@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@ -13,6 +13,7 @@ from homeassistant.components.number import (
|
||||
DEFAULT_MIN_VALUE,
|
||||
DEFAULT_STEP,
|
||||
DOMAIN as NUMBER_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
NumberEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@ -21,12 +22,10 @@ from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
CONF_OPTIMISTIC,
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -35,6 +34,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -70,19 +70,6 @@ NUMBER_CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def _async_create_entities(
|
||||
hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
|
||||
) -> list[TemplateNumber]:
|
||||
"""Create the Template number."""
|
||||
entities = []
|
||||
for definition in definitions:
|
||||
unique_id = definition.get(CONF_UNIQUE_ID)
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
entities.append(TemplateNumber(hass, definition, unique_id))
|
||||
return entities
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -90,23 +77,14 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template number."""
|
||||
if discovery_info is None:
|
||||
_LOGGER.warning(
|
||||
"Template number entities can only be configured under template:"
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerNumberEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
async_add_entities(
|
||||
await _async_create_entities(
|
||||
hass, discovery_info["entities"], discovery_info["unique_id"]
|
||||
)
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
NUMBER_DOMAIN,
|
||||
config,
|
||||
StateNumberEntity,
|
||||
TriggerNumberEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
)
|
||||
|
||||
|
||||
@ -119,22 +97,25 @@ async def async_setup_entry(
|
||||
_options = dict(config_entry.options)
|
||||
_options.pop("template_type")
|
||||
validated_config = NUMBER_CONFIG_SCHEMA(_options)
|
||||
async_add_entities([TemplateNumber(hass, validated_config, config_entry.entry_id)])
|
||||
async_add_entities(
|
||||
[StateNumberEntity(hass, validated_config, config_entry.entry_id)]
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create_preview_number(
|
||||
hass: HomeAssistant, name: str, config: dict[str, Any]
|
||||
) -> TemplateNumber:
|
||||
) -> StateNumberEntity:
|
||||
"""Create a preview number."""
|
||||
validated_config = NUMBER_CONFIG_SCHEMA(config | {CONF_NAME: name})
|
||||
return TemplateNumber(hass, validated_config, None)
|
||||
return StateNumberEntity(hass, validated_config, None)
|
||||
|
||||
|
||||
class TemplateNumber(TemplateEntity, NumberEntity):
|
||||
class StateNumberEntity(TemplateEntity, NumberEntity):
|
||||
"""Representation of a template number."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -143,8 +124,10 @@ class TemplateNumber(TemplateEntity, NumberEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the number."""
|
||||
super().__init__(hass, config=config, unique_id=unique_id)
|
||||
assert self._attr_name is not None
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
if TYPE_CHECKING:
|
||||
assert self._attr_name is not None
|
||||
|
||||
self._value_template = config[CONF_STATE]
|
||||
self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], self._attr_name, DOMAIN)
|
||||
|
||||
@ -156,10 +139,6 @@ class TemplateNumber(TemplateEntity, NumberEntity):
|
||||
self._attr_native_step = DEFAULT_STEP
|
||||
self._attr_native_min_value = DEFAULT_MIN_VALUE
|
||||
self._attr_native_max_value = DEFAULT_MAX_VALUE
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
@ -208,6 +187,7 @@ class TemplateNumber(TemplateEntity, NumberEntity):
|
||||
class TriggerNumberEntity(TriggerEntity, NumberEntity):
|
||||
"""Number entity based on trigger data."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
domain = NUMBER_DOMAIN
|
||||
extra_template_keys = (
|
||||
CONF_STATE,
|
||||
|
@ -11,19 +11,13 @@ from homeassistant.components.select import (
|
||||
ATTR_OPTION,
|
||||
ATTR_OPTIONS,
|
||||
DOMAIN as SELECT_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
SelectEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_NAME,
|
||||
CONF_OPTIMISTIC,
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_ID, CONF_NAME, CONF_OPTIMISTIC, CONF_STATE
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -33,6 +27,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -65,19 +60,6 @@ SELECT_CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def _async_create_entities(
|
||||
hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None
|
||||
) -> list[TemplateSelect]:
|
||||
"""Create the Template select."""
|
||||
entities = []
|
||||
for definition in definitions:
|
||||
unique_id = definition.get(CONF_UNIQUE_ID)
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
entities.append(TemplateSelect(hass, definition, unique_id))
|
||||
return entities
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -85,23 +67,14 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template select."""
|
||||
if discovery_info is None:
|
||||
_LOGGER.warning(
|
||||
"Template select entities can only be configured under template:"
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerSelectEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
async_add_entities(
|
||||
await _async_create_entities(
|
||||
hass, discovery_info["entities"], discovery_info["unique_id"]
|
||||
)
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
SELECT_DOMAIN,
|
||||
config,
|
||||
TemplateSelect,
|
||||
TriggerSelectEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
)
|
||||
|
||||
|
||||
@ -120,6 +93,8 @@ async def async_setup_entry(
|
||||
class AbstractTemplateSelect(AbstractTemplateEntity, SelectEntity):
|
||||
"""Representation of a template select features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
@ -159,7 +134,7 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the select."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSelect.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
@ -169,11 +144,6 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
|
||||
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
|
@ -44,8 +44,6 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, selector, template
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -55,17 +53,13 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_ATTRIBUTE_TEMPLATES, CONF_AVAILABILITY_TEMPLATE, CONF_OBJECT_ID
|
||||
from .template_entity import (
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA,
|
||||
TemplateEntity,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .const import CONF_ATTRIBUTE_TEMPLATES, CONF_AVAILABILITY_TEMPLATE
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TEMPLATE_ENTITY_COMMON_SCHEMA, TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_FRIENDLY_NAME_TEMPLATE: CONF_NAME,
|
||||
CONF_FRIENDLY_NAME: CONF_NAME,
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
|
||||
@ -142,27 +136,6 @@ def extra_validation_checks(val):
|
||||
return val
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, cfg: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy sensor definitions to modern ones."""
|
||||
sensors = []
|
||||
|
||||
for object_id, entity_cfg in cfg.items():
|
||||
entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_cfg = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_cfg, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_cfg:
|
||||
entity_cfg[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
sensors.append(entity_cfg)
|
||||
|
||||
return sensors
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = vol.All(
|
||||
SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
@ -177,33 +150,6 @@ PLATFORM_SCHEMA = vol.All(
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback | AddConfigEntryEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template sensors."""
|
||||
sensors = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
sensors.append(
|
||||
SensorTemplate(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(sensors)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -211,27 +157,16 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template sensors."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerSensorEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
SENSOR_DOMAIN,
|
||||
config,
|
||||
StateSensorEntity,
|
||||
TriggerSensorEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_SENSORS,
|
||||
)
|
||||
|
||||
|
||||
@ -244,22 +179,25 @@ async def async_setup_entry(
|
||||
_options = dict(config_entry.options)
|
||||
_options.pop("template_type")
|
||||
validated_config = SENSOR_CONFIG_SCHEMA(_options)
|
||||
async_add_entities([SensorTemplate(hass, validated_config, config_entry.entry_id)])
|
||||
async_add_entities(
|
||||
[StateSensorEntity(hass, validated_config, config_entry.entry_id)]
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create_preview_sensor(
|
||||
hass: HomeAssistant, name: str, config: dict[str, Any]
|
||||
) -> SensorTemplate:
|
||||
) -> StateSensorEntity:
|
||||
"""Create a preview sensor."""
|
||||
validated_config = SENSOR_CONFIG_SCHEMA(config | {CONF_NAME: name})
|
||||
return SensorTemplate(hass, validated_config, None)
|
||||
return StateSensorEntity(hass, validated_config, None)
|
||||
|
||||
|
||||
class SensorTemplate(TemplateEntity, SensorEntity):
|
||||
class StateSensorEntity(TemplateEntity, SensorEntity):
|
||||
"""Representation of a Template Sensor."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -268,7 +206,7 @@ class SensorTemplate(TemplateEntity, SensorEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id)
|
||||
super().__init__(hass, config, unique_id)
|
||||
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
self._attr_state_class = config.get(CONF_STATE_CLASS)
|
||||
@ -276,14 +214,6 @@ class SensorTemplate(TemplateEntity, SensorEntity):
|
||||
self._attr_last_reset_template: template.Template | None = config.get(
|
||||
ATTR_LAST_RESET
|
||||
)
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
@ -327,6 +257,7 @@ class SensorTemplate(TemplateEntity, SensorEntity):
|
||||
class TriggerSensorEntity(TriggerEntity, RestoreSensor):
|
||||
"""Sensor entity based on trigger data."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
domain = SENSOR_DOMAIN
|
||||
extra_template_keys = (CONF_STATE,)
|
||||
|
||||
|
@ -30,8 +30,6 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, selector, template
|
||||
from homeassistant.helpers.device import async_device_info_to_link_from_device_id
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@ -40,19 +38,18 @@ from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_OBJECT_ID, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
|
||||
from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
|
||||
@ -96,27 +93,6 @@ SWITCH_CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy switch configuration definitions to modern ones."""
|
||||
switches = []
|
||||
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
switches.append(entity_conf)
|
||||
|
||||
return switches
|
||||
|
||||
|
||||
def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]:
|
||||
"""Rewrite option configuration to modern configuration."""
|
||||
option_config = {**option_config}
|
||||
@ -127,33 +103,6 @@ def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str,
|
||||
return option_config
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template switches."""
|
||||
switches = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
switches.append(
|
||||
SwitchTemplate(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(switches)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
@ -161,27 +110,16 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the template switches."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_SWITCHES]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerSwitchEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
SWITCH_DOMAIN,
|
||||
config,
|
||||
StateSwitchEntity,
|
||||
TriggerSwitchEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_SWITCHES,
|
||||
)
|
||||
|
||||
|
||||
@ -195,23 +133,26 @@ async def async_setup_entry(
|
||||
_options.pop("template_type")
|
||||
_options = rewrite_options_to_modern_conf(_options)
|
||||
validated_config = SWITCH_CONFIG_SCHEMA(_options)
|
||||
async_add_entities([SwitchTemplate(hass, validated_config, config_entry.entry_id)])
|
||||
async_add_entities(
|
||||
[StateSwitchEntity(hass, validated_config, config_entry.entry_id)]
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create_preview_switch(
|
||||
hass: HomeAssistant, name: str, config: dict[str, Any]
|
||||
) -> SwitchTemplate:
|
||||
) -> StateSwitchEntity:
|
||||
"""Create a preview switch."""
|
||||
updated_config = rewrite_options_to_modern_conf(config)
|
||||
validated_config = SWITCH_CONFIG_SCHEMA(updated_config | {CONF_NAME: name})
|
||||
return SwitchTemplate(hass, validated_config, None)
|
||||
return StateSwitchEntity(hass, validated_config, None)
|
||||
|
||||
|
||||
class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
|
||||
class StateSwitchEntity(TemplateEntity, SwitchEntity, RestoreEntity):
|
||||
"""Representation of a Template switch."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -220,11 +161,8 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the Template switch."""
|
||||
super().__init__(hass, config=config, unique_id=unique_id)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
super().__init__(hass, config, unique_id)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
@ -238,10 +176,6 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
|
||||
|
||||
self._state: bool | None = False
|
||||
self._attr_assumed_state = self._template is None
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
@ -304,6 +238,7 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
|
||||
class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
|
||||
"""Switch entity based on trigger data."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
domain = SWITCH_DOMAIN
|
||||
|
||||
def __init__(
|
||||
@ -314,6 +249,7 @@ class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(hass, coordinator, config)
|
||||
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
self._template = config.get(CONF_STATE)
|
||||
if on_action := config.get(CONF_TURN_ON):
|
||||
@ -326,11 +262,6 @@ class TriggerSwitchEntity(TriggerEntity, SwitchEntity, RestoreEntity):
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
self._parse_result.add(CONF_STATE)
|
||||
|
||||
self._attr_device_info = async_device_info_to_link_from_device_id(
|
||||
hass,
|
||||
config.get(CONF_DEVICE_ID),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
|
@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping
|
||||
import contextlib
|
||||
import itertools
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
@ -14,7 +13,6 @@ import voluptuous as vol
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.const import (
|
||||
CONF_ENTITY_PICTURE_TEMPLATE,
|
||||
CONF_FRIENDLY_NAME,
|
||||
CONF_ICON,
|
||||
CONF_ICON_TEMPLATE,
|
||||
CONF_NAME,
|
||||
@ -137,42 +135,6 @@ TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY = vol.Schema(
|
||||
).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema)
|
||||
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_ICON_TEMPLATE: CONF_ICON,
|
||||
CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
|
||||
CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
|
||||
CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
|
||||
CONF_FRIENDLY_NAME: CONF_NAME,
|
||||
}
|
||||
|
||||
|
||||
def rewrite_common_legacy_to_modern_conf(
|
||||
hass: HomeAssistant,
|
||||
entity_cfg: dict[str, Any],
|
||||
extra_legacy_fields: dict[str, str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Rewrite legacy config."""
|
||||
entity_cfg = {**entity_cfg}
|
||||
if extra_legacy_fields is None:
|
||||
extra_legacy_fields = {}
|
||||
|
||||
for from_key, to_key in itertools.chain(
|
||||
LEGACY_FIELDS.items(), extra_legacy_fields.items()
|
||||
):
|
||||
if from_key not in entity_cfg or to_key in entity_cfg:
|
||||
continue
|
||||
|
||||
val = entity_cfg.pop(from_key)
|
||||
if isinstance(val, str):
|
||||
val = Template(val, hass)
|
||||
entity_cfg[to_key] = val
|
||||
|
||||
if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str):
|
||||
entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME], hass)
|
||||
|
||||
return entity_cfg
|
||||
|
||||
|
||||
class _TemplateAttribute:
|
||||
"""Attribute value linked to template result."""
|
||||
|
||||
@@ -278,17 +240,11 @@ class TemplateEntity(AbstractTemplateEntity):
    def __init__(
        self,
        hass: HomeAssistant,
        *,
        availability_template: Template | None = None,
        icon_template: Template | None = None,
        entity_picture_template: Template | None = None,
        attribute_templates: dict[str, Template] | None = None,
        config: ConfigType | None = None,
        fallback_name: str | None = None,
        unique_id: str | None = None,
        config: ConfigType,
        unique_id: str | None,
    ) -> None:
        """Template Entity."""
        AbstractTemplateEntity.__init__(self, hass)
        AbstractTemplateEntity.__init__(self, hass, config)
        self._template_attrs: dict[Template, list[_TemplateAttribute]] = {}
        self._template_result_info: TrackTemplateResultInfo | None = None
        self._attr_extra_state_attributes = {}
@@ -307,22 +263,13 @@ class TemplateEntity(AbstractTemplateEntity):
            | None
        ) = None
        self._run_variables: ScriptVariables | dict
        if config is None:
            self._attribute_templates = attribute_templates
            self._availability_template = availability_template
            self._icon_template = icon_template
            self._entity_picture_template = entity_picture_template
            self._friendly_name_template = None
            self._run_variables = {}
            self._blueprint_inputs = None
        else:
            self._attribute_templates = config.get(CONF_ATTRIBUTES)
            self._availability_template = config.get(CONF_AVAILABILITY)
            self._icon_template = config.get(CONF_ICON)
            self._entity_picture_template = config.get(CONF_PICTURE)
            self._friendly_name_template = config.get(CONF_NAME)
            self._run_variables = config.get(CONF_VARIABLES, {})
            self._blueprint_inputs = config.get("raw_blueprint_inputs")
        self._attribute_templates = config.get(CONF_ATTRIBUTES)
        self._availability_template = config.get(CONF_AVAILABILITY)
        self._icon_template = config.get(CONF_ICON)
        self._entity_picture_template = config.get(CONF_PICTURE)
        self._friendly_name_template = config.get(CONF_NAME)
        self._run_variables = config.get(CONF_VARIABLES, {})
        self._blueprint_inputs = config.get("raw_blueprint_inputs")

        class DummyState(State):
            """None-state for template entities not yet added to the state machine."""
@@ -340,7 +287,7 @@ class TemplateEntity(AbstractTemplateEntity):
        variables = {"this": DummyState()}

        # Try to render the name as it can influence the entity ID
        self._attr_name = fallback_name
        self._attr_name = None
        if self._friendly_name_template:
            with contextlib.suppress(TemplateError):
                self._attr_name = self._friendly_name_template.async_render(
@@ -30,7 +30,7 @@ class TriggerEntity(  # pylint: disable=hass-enforce-class-module
        """Initialize the entity."""
        CoordinatorEntity.__init__(self, coordinator)
        TriggerBaseEntity.__init__(self, hass, config)
        AbstractTemplateEntity.__init__(self, hass)
        AbstractTemplateEntity.__init__(self, hass, config)

        self._state_render_error = False
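Taken together, the constructor hunks above move TemplateEntity and AbstractTemplateEntity from keyword-style, partly optional arguments (individual templates, an optional config, a fallback_name) to a required config dict passed positionally. A hedged sketch of what a subclass call now looks like; ExampleStateEntity is invented, and real subclasses also set up their own templates and scripts after this call:

from homeassistant.components.template.template_entity import TemplateEntity


class ExampleStateEntity(TemplateEntity):
    """Hypothetical subclass, only to illustrate the new call shape."""

    def __init__(self, hass, config, unique_id) -> None:
        # old: TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
        # new: config and unique_id are plain positional parameters
        TemplateEntity.__init__(self, hass, config, unique_id)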
@ -34,20 +34,18 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_OBJECT_ID, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
TEMPLATE_ENTITY_ATTRIBUTES_SCHEMA_LEGACY,
|
||||
TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY,
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_attributes_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
@ -72,7 +70,7 @@ _VALID_STATES = [
|
||||
VacuumActivity.ERROR,
|
||||
]
|
||||
|
||||
LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | {
|
||||
LEGACY_FIELDS = {
|
||||
CONF_BATTERY_LEVEL_TEMPLATE: CONF_BATTERY_LEVEL,
|
||||
CONF_FAN_SPEED_TEMPLATE: CONF_FAN_SPEED,
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
@ -125,88 +123,31 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def rewrite_legacy_to_modern_conf(
|
||||
hass: HomeAssistant, config: dict[str, dict]
|
||||
) -> list[dict]:
|
||||
"""Rewrite legacy switch configuration definitions to modern ones."""
|
||||
vacuums = []
|
||||
|
||||
for object_id, entity_conf in config.items():
|
||||
entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
|
||||
|
||||
entity_conf = rewrite_common_legacy_to_modern_conf(
|
||||
hass, entity_conf, LEGACY_FIELDS
|
||||
)
|
||||
|
||||
if CONF_NAME not in entity_conf:
|
||||
entity_conf[CONF_NAME] = template.Template(object_id, hass)
|
||||
|
||||
vacuums.append(entity_conf)
|
||||
|
||||
return vacuums
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_template_tracking_entities(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
hass: HomeAssistant,
|
||||
definitions: list[dict],
|
||||
unique_id_prefix: str | None,
|
||||
) -> None:
|
||||
"""Create the template switches."""
|
||||
vacuums = []
|
||||
|
||||
for entity_conf in definitions:
|
||||
unique_id = entity_conf.get(CONF_UNIQUE_ID)
|
||||
|
||||
if unique_id and unique_id_prefix:
|
||||
unique_id = f"{unique_id_prefix}-{unique_id}"
|
||||
|
||||
vacuums.append(
|
||||
TemplateVacuum(
|
||||
hass,
|
||||
entity_conf,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(vacuums)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Template cover."""
|
||||
if discovery_info is None:
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
rewrite_legacy_to_modern_conf(hass, config[CONF_VACUUMS]),
|
||||
None,
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerVacuumEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
"""Set up the Template vacuum."""
|
||||
await async_setup_template_platform(
|
||||
hass,
|
||||
discovery_info["entities"],
|
||||
discovery_info["unique_id"],
|
||||
VACUUM_DOMAIN,
|
||||
config,
|
||||
TemplateStateVacuumEntity,
|
||||
TriggerVacuumEntity,
|
||||
async_add_entities,
|
||||
discovery_info,
|
||||
LEGACY_FIELDS,
|
||||
legacy_key=CONF_VACUUMS,
|
||||
)
|
||||
|
||||
|
||||
class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
"""Representation of a template vacuum features."""
|
||||
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
@ -350,7 +291,7 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
self._attr_fan_speed = None
|
||||
|
||||
|
||||
class TemplateVacuum(TemplateEntity, AbstractTemplateVacuum):
|
||||
class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
"""A template vacuum component."""
|
||||
|
||||
_attr_should_poll = False
|
||||
@ -362,12 +303,8 @@ class TemplateVacuum(TemplateEntity, AbstractTemplateVacuum):
|
||||
unique_id,
|
||||
) -> None:
|
||||
"""Initialize the vacuum."""
|
||||
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id)
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateVacuum.__init__(self, config)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
@ -31,16 +31,10 @@ from homeassistant.components.weather import (
|
||||
WeatherEntity,
|
||||
WeatherEntityFeature,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_TEMPERATURE_UNIT,
|
||||
CONF_UNIQUE_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.const import CONF_TEMPERATURE_UNIT, STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@ -52,11 +46,8 @@ from homeassistant.util.unit_conversion import (
|
||||
)
|
||||
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .template_entity import (
|
||||
TemplateEntity,
|
||||
make_template_entity_common_modern_schema,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .helpers import async_setup_template_platform
|
||||
from .template_entity import TemplateEntity, make_template_entity_common_modern_schema
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
CHECK_FORECAST_KEYS = (
|
||||
@ -138,33 +129,6 @@ WEATHER_SCHEMA = vol.Schema(
PLATFORM_SCHEMA = WEATHER_PLATFORM_SCHEMA.extend(WEATHER_SCHEMA.schema)


@callback
def _async_create_template_tracking_entities(
    async_add_entities: AddEntitiesCallback,
    hass: HomeAssistant,
    definitions: list[dict],
    unique_id_prefix: str | None,
) -> None:
    """Create the weather entities."""
    entities = []

    for entity_conf in definitions:
        unique_id = entity_conf.get(CONF_UNIQUE_ID)

        if unique_id and unique_id_prefix:
            unique_id = f"{unique_id_prefix}-{unique_id}"

        entities.append(
            WeatherTemplate(
                hass,
                entity_conf,
                unique_id,
            )
        )

    async_add_entities(entities)


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
@ -172,39 +136,23 @@ async def async_setup_platform(
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Template weather."""
    if discovery_info is None:
        config = rewrite_common_legacy_to_modern_conf(hass, config)
        unique_id = config.get(CONF_UNIQUE_ID)
        async_add_entities(
            [
                WeatherTemplate(
                    hass,
                    config,
                    unique_id,
                )
            ]
        )
        return

    if "coordinator" in discovery_info:
        async_add_entities(
            TriggerWeatherEntity(hass, discovery_info["coordinator"], config)
            for config in discovery_info["entities"]
        )
        return

    _async_create_template_tracking_entities(
        async_add_entities,
    await async_setup_template_platform(
        hass,
        discovery_info["entities"],
        discovery_info["unique_id"],
        WEATHER_DOMAIN,
        config,
        StateWeatherEntity,
        TriggerWeatherEntity,
        async_add_entities,
        discovery_info,
        {},
    )


class WeatherTemplate(TemplateEntity, WeatherEntity):
class StateWeatherEntity(TemplateEntity, WeatherEntity):
    """Representation of a weather condition."""

    _attr_should_poll = False
    _entity_id_format = ENTITY_ID_FORMAT

    def __init__(
        self,
@ -213,9 +161,8 @@ class WeatherTemplate(TemplateEntity, WeatherEntity):
        unique_id: str | None,
    ) -> None:
        """Initialize the Template weather."""
        super().__init__(hass, config=config, unique_id=unique_id)
        super().__init__(hass, config, unique_id)

        name = self._attr_name
        self._condition_template = config[CONF_CONDITION_TEMPLATE]
        self._temperature_template = config[CONF_TEMPERATURE_TEMPLATE]
        self._humidity_template = config[CONF_HUMIDITY_TEMPLATE]
@ -243,8 +190,6 @@ class WeatherTemplate(TemplateEntity, WeatherEntity):
        self._attr_native_visibility_unit = config.get(CONF_VISIBILITY_UNIT)
        self._attr_native_wind_speed_unit = config.get(CONF_WIND_SPEED_UNIT)

        self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, name, hass=hass)

        self._condition = None
        self._temperature = None
        self._humidity = None
@ -538,6 +483,7 @@ class WeatherExtraStoredData(ExtraStoredData):
class TriggerWeatherEntity(TriggerEntity, WeatherEntity, RestoreEntity):
    """Sensor entity based on trigger data."""

    _entity_id_format = ENTITY_ID_FORMAT
    domain = WEATHER_DOMAIN
    extra_template_keys = (
        CONF_CONDITION_TEMPLATE,
@ -553,6 +499,7 @@ class TriggerWeatherEntity(TriggerEntity, WeatherEntity, RestoreEntity):
    ) -> None:
        """Initialize."""
        super().__init__(hass, coordinator, config)

        self._attr_native_precipitation_unit = config.get(CONF_PRECIPITATION_UNIT)
        self._attr_native_pressure_unit = config.get(CONF_PRESSURE_UNIT)
        self._attr_native_temperature_unit = config.get(CONF_TEMPERATURE_UNIT)

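The _entity_id_format class attribute added above, together with the removal of the explicit async_generate_entity_id(...) calls, suggests the shared template base classes now derive the entity_id themselves. A purely illustrative sketch of that idea follows; all names below are made up, and Home Assistant's real helper additionally slugifies the name and guarantees uniqueness.

# Illustrative only -- a toy version of the idea, not the template base classes.
class SketchEntityBase:
    _entity_id_format: str | None = None  # each platform sets e.g. "weather.{}"

    def __init__(self, config: dict) -> None:
        object_id = config.get("object_id")
        if self._entity_id_format and object_id:
            # Uniqueness and slugification handling omitted in this sketch.
            self.entity_id = self._entity_id_format.format(object_id)


class SketchWeather(SketchEntityBase):
    _entity_id_format = "weather.{}"


print(SketchWeather({"object_id": "backyard"}).entity_id)  # weather.backyard
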
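On the setup change earlier in this file, where the removed discovery, trigger, and legacy YAML branches collapse into a single await async_setup_template_platform(...) call: the sketch below is only a guess at the general shape of that consolidation, reconstructed from the branches removed in the hunks above. It is not the actual helper imported from .helpers, and the function and parameter names are purely for illustration.

# Rough, hypothetical sketch of the consolidated setup flow; the real
# async_setup_template_platform (imported from .helpers above) may differ.
async def setup_template_platform_sketch(
    hass,
    domain,              # accepted for parity with the call sites; unused here
    config,
    state_cls,           # e.g. StateWeatherEntity / TemplateStateVacuumEntity
    trigger_cls,         # e.g. TriggerWeatherEntity / TriggerVacuumEntity
    async_add_entities,
    discovery_info,
    legacy_fields,       # accepted for parity with the call sites; unused here
):
    if discovery_info is None:
        # Plain YAML platform setup: add a single state-based entity.
        # (Legacy-to-modern option rewriting omitted for brevity.)
        async_add_entities([state_cls(hass, config, config.get("unique_id"))])
        return

    if "coordinator" in discovery_info:
        # Trigger-based template entities share an update coordinator.
        async_add_entities(
            trigger_cls(hass, discovery_info["coordinator"], entity_conf)
            for entity_conf in discovery_info["entities"]
        )
        return

    # Discovery path: one state entity per config block, with the parent
    # unique_id used as a prefix (mirrors the helper removed above).
    prefix = discovery_info["unique_id"]
    entities = []
    for entity_conf in discovery_info["entities"]:
        unique_id = entity_conf.get("unique_id")
        if unique_id and prefix:
            unique_id = f"{prefix}-{unique_id}"
        entities.append(state_cls(hass, entity_conf, unique_id))
    async_add_entities(entities)
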
@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==1.2.0"]
  "requirements": ["tesla-fleet-api==1.2.2"]
}

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/teslemetry",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==1.2.0", "teslemetry-stream==0.7.9"]
  "requirements": ["tesla-fleet-api==1.2.2", "teslemetry-stream==0.7.9"]
}

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tessie",
  "iot_class": "cloud_polling",
  "loggers": ["tessie", "tesla-fleet-api"],
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.0"]
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.2"]
}

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/touchline_sl",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["pytouchlinesl==0.3.0"]
  "requirements": ["pytouchlinesl==0.4.0"]
}

@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass, field
from enum import StrEnum
import logging
@ -417,8 +416,6 @@ class UnitOfMeasurement:
    device_classes: set[str]

    aliases: set[str] = field(default_factory=set)
    conversion_unit: str | None = None
    conversion_fn: Callable[[float], float] | None = None


# A tuple of available units of measurements we can work with.
@ -458,8 +455,6 @@ UNITS = (
            SensorDeviceClass.CO,
            SensorDeviceClass.CO2,
        },
        conversion_unit=CONCENTRATION_PARTS_PER_MILLION,
        conversion_fn=lambda x: x / 1000,
    ),
    UnitOfMeasurement(
        unit=UnitOfElectricCurrent.AMPERE,
@ -470,8 +465,6 @@ UNITS = (
        unit=UnitOfElectricCurrent.MILLIAMPERE,
        aliases={"ma", "milliampere"},
        device_classes={SensorDeviceClass.CURRENT},
        conversion_unit=UnitOfElectricCurrent.AMPERE,
        conversion_fn=lambda x: x / 1000,
    ),
    UnitOfMeasurement(
        unit=UnitOfEnergy.WATT_HOUR,
@ -527,8 +520,6 @@ UNITS = (
            SensorDeviceClass.SULPHUR_DIOXIDE,
            SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
        },
        conversion_unit=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        conversion_fn=lambda x: x * 1000,
    ),
    UnitOfMeasurement(
        unit=UnitOfPower.WATT,
@ -596,8 +587,6 @@ UNITS = (
        unit=UnitOfElectricPotential.MILLIVOLT,
        aliases={"mv", "millivolt"},
        device_classes={SensorDeviceClass.VOLTAGE},
        conversion_unit=UnitOfElectricPotential.VOLT,
        conversion_fn=lambda x: x / 1000,
    ),
)

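For context on the UnitOfMeasurement entries trimmed above (the conversion_unit/conversion_fn fields and keyword arguments are removed), here is a small self-contained sketch of how such a table of units, aliases, and device classes can be folded into a lookup map. The field names mirror the diff; everything else is invented for illustration and uses plain strings instead of the integration's constants.

# Illustrative sketch only; not the integration's real unit table.
from dataclasses import dataclass, field


@dataclass
class UnitOfMeasurementSketch:
    unit: str
    device_classes: set[str]
    aliases: set[str] = field(default_factory=set)


UNITS_SKETCH = (
    UnitOfMeasurementSketch(
        unit="mA", aliases={"ma", "milliampere"}, device_classes={"current"}
    ),
    UnitOfMeasurementSketch(
        unit="mV", aliases={"mv", "millivolt"}, device_classes={"voltage"}
    ),
)

# Build {device_class: {alias_or_unit: canonical_unit}} for fast lookups.
DEVICE_CLASS_UNITS: dict[str, dict[str, str]] = {}
for uom in UNITS_SKETCH:
    for device_class in uom.device_classes:
        table = DEVICE_CLASS_UNITS.setdefault(device_class, {})
        for key in {uom.unit, *uom.aliases}:
            table[key] = uom.unit

print(DEVICE_CLASS_UNITS["voltage"]["mv"])  # -> "mV"
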
Some files were not shown because too many files have changed in this diff.