mirror of
https://github.com/home-assistant/core.git
synced 2025-09-21 02:49:32 +00:00
Compare commits
1 Commits
add-includ
...
condition_
Author | SHA1 | Date | |
---|---|---|---|
![]() |
1c4baa8dca |
25
.github/workflows/ci.yaml
vendored
25
.github/workflows/ci.yaml
vendored
@@ -523,24 +523,22 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
|
||||
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Check if apt cache exists
|
||||
id: cache-apt-check
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
- name: Restore apt cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
id: cache-apt
|
||||
uses: actions/cache@v4.2.4
|
||||
with:
|
||||
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
if: |
|
||||
steps.cache-venv.outputs.cache-hit != 'true'
|
||||
|| steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
|
||||
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
|
||||
mkdir -p ${{ env.APT_CACHE_DIR }}
|
||||
mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
|
||||
fi
|
||||
@@ -565,18 +563,9 @@ jobs:
|
||||
libswscale-dev \
|
||||
libudev-dev
|
||||
|
||||
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
|
||||
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
|
||||
sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
|
10
CODEOWNERS
generated
10
CODEOWNERS
generated
@@ -107,8 +107,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/ambient_station/ @bachya
|
||||
/tests/components/ambient_station/ @bachya
|
||||
/homeassistant/components/amcrest/ @flacjacket
|
||||
/homeassistant/components/analytics/ @home-assistant/core
|
||||
/tests/components/analytics/ @home-assistant/core
|
||||
/homeassistant/components/analytics/ @home-assistant/core @ludeeus
|
||||
/tests/components/analytics/ @home-assistant/core @ludeeus
|
||||
/homeassistant/components/analytics_insights/ @joostlek
|
||||
/tests/components/analytics_insights/ @joostlek
|
||||
/homeassistant/components/android_ip_webcam/ @engrbm87
|
||||
@@ -1533,8 +1533,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/switchbee/ @jafar-atili
|
||||
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
|
||||
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
|
||||
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
|
||||
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
|
||||
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
||||
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
||||
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
|
||||
/tests/components/switcher_kis/ @thecode @YogevBokobza
|
||||
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
|
||||
@@ -1710,8 +1710,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
|
||||
/homeassistant/components/vicare/ @CFenner
|
||||
/tests/components/vicare/ @CFenner
|
||||
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
|
||||
/tests/components/victron_remote_monitoring/ @AndyTempel
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
/tests/components/vilfo/ @ManneW
|
||||
/homeassistant/components/vivotek/ @HarlemSquirrel
|
||||
|
@@ -2,31 +2,21 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from homeassistant.components.media_source import MediaSource, local_source
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR
|
||||
|
||||
|
||||
async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
|
||||
"""Set up local media source."""
|
||||
media_dirs = list(hass.config.media_dirs.values())
|
||||
|
||||
if not media_dirs:
|
||||
raise HomeAssistantError(
|
||||
"AI Task media source requires at least one media directory configured"
|
||||
)
|
||||
|
||||
media_dir = Path(media_dirs[0]) / DOMAIN / IMAGE_DIR
|
||||
media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}")
|
||||
|
||||
hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"AI Generated Images",
|
||||
{IMAGE_DIR: str(media_dir)},
|
||||
{IMAGE_DIR: media_dir},
|
||||
f"/{DOMAIN}",
|
||||
)
|
||||
return source
|
||||
|
@@ -12,7 +12,7 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import camera, conversation, image, media_source
|
||||
from homeassistant.components import camera, conversation, media_source
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -31,14 +31,14 @@ from .const import (
|
||||
)
|
||||
|
||||
|
||||
def _save_camera_snapshot(image_data: camera.Image | image.Image) -> Path:
|
||||
def _save_camera_snapshot(image: camera.Image) -> Path:
|
||||
"""Save camera snapshot to temp file."""
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="wb",
|
||||
suffix=mimetypes.guess_extension(image_data.content_type, False),
|
||||
suffix=mimetypes.guess_extension(image.content_type, False),
|
||||
delete=False,
|
||||
) as temp_file:
|
||||
temp_file.write(image_data.content)
|
||||
temp_file.write(image.content)
|
||||
return Path(temp_file.name)
|
||||
|
||||
|
||||
@@ -54,31 +54,26 @@ async def _resolve_attachments(
|
||||
for attachment in attachments or []:
|
||||
media_content_id = attachment["media_content_id"]
|
||||
|
||||
# Special case for certain media sources
|
||||
for integration in camera, image:
|
||||
media_source_prefix = f"media-source://{integration.DOMAIN}/"
|
||||
if not media_content_id.startswith(media_source_prefix):
|
||||
continue
|
||||
|
||||
# Special case for camera media sources
|
||||
if media_content_id.startswith("media-source://camera/"):
|
||||
# Extract entity_id from the media content ID
|
||||
entity_id = media_content_id.removeprefix(media_source_prefix)
|
||||
entity_id = media_content_id.removeprefix("media-source://camera/")
|
||||
|
||||
# Get snapshot from entity
|
||||
image_data = await integration.async_get_image(hass, entity_id)
|
||||
# Get snapshot from camera
|
||||
image = await camera.async_get_image(hass, entity_id)
|
||||
|
||||
temp_filename = await hass.async_add_executor_job(
|
||||
_save_camera_snapshot, image_data
|
||||
_save_camera_snapshot, image
|
||||
)
|
||||
created_files.append(temp_filename)
|
||||
|
||||
resolved_attachments.append(
|
||||
conversation.Attachment(
|
||||
media_content_id=media_content_id,
|
||||
mime_type=image_data.content_type,
|
||||
mime_type=image.content_type,
|
||||
path=temp_filename,
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
# Handle regular media sources
|
||||
media = await media_source.async_resolve_media(hass, media_content_id, None)
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"domain": "analytics",
|
||||
"name": "Analytics",
|
||||
"after_dependencies": ["energy", "hassio", "recorder"],
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"codeowners": ["@home-assistant/core", "@ludeeus"],
|
||||
"dependencies": ["api", "websocket_api", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/analytics",
|
||||
"integration_type": "system",
|
||||
|
@@ -467,10 +467,7 @@ async def async_setup_entry(
|
||||
# periodical (or manual) self test since last daemon restart. It might not be available
|
||||
# when we set up the integration, and we do not know if it would ever be available. Here we
|
||||
# add it anyway and mark it as unknown initially.
|
||||
#
|
||||
# We also sort the resources to ensure the order of entities created is deterministic since
|
||||
# "APCMODEL" and "MODEL" resources map to the same "Model" name.
|
||||
for resource in sorted(available_resources | {LAST_S_TEST}):
|
||||
for resource in available_resources | {LAST_S_TEST}:
|
||||
if resource not in SENSORS:
|
||||
_LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
|
||||
continue
|
||||
|
@@ -120,7 +120,6 @@ class AsusWrtBridge(ABC):
|
||||
|
||||
def __init__(self, host: str) -> None:
|
||||
"""Initialize Bridge."""
|
||||
self._configuration_url = f"http://{host}"
|
||||
self._host = host
|
||||
self._firmware: str | None = None
|
||||
self._label_mac: str | None = None
|
||||
@@ -128,11 +127,6 @@ class AsusWrtBridge(ABC):
|
||||
self._model_id: str | None = None
|
||||
self._serial_number: str | None = None
|
||||
|
||||
@property
|
||||
def configuration_url(self) -> str:
|
||||
"""Return configuration URL."""
|
||||
return self._configuration_url
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
"""Return hostname."""
|
||||
@@ -377,7 +371,6 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
# get main router properties
|
||||
if mac := _identity.mac:
|
||||
self._label_mac = format_mac(mac)
|
||||
self._configuration_url = self._api.webpanel
|
||||
self._firmware = str(_identity.firmware)
|
||||
self._model = _identity.model
|
||||
self._model_id = _identity.product_id
|
||||
|
@@ -388,13 +388,13 @@ class AsusWrtRouter:
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return the device information."""
|
||||
info = DeviceInfo(
|
||||
configuration_url=self._api.configuration_url,
|
||||
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
|
||||
name=self.host,
|
||||
model=self._api.model or "Asus Router",
|
||||
model_id=self._api.model_id,
|
||||
serial_number=self._api.serial_number,
|
||||
manufacturer="Asus",
|
||||
configuration_url=f"http://{self.host}",
|
||||
)
|
||||
if self._api.firmware:
|
||||
info["sw_version"] = self._api.firmware
|
||||
|
@@ -29,5 +29,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
|
||||
"requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
|
||||
}
|
||||
|
@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
|
||||
"tmp_backups/*.tar",
|
||||
"OZW_Log.txt",
|
||||
"tts/*",
|
||||
"ai_task/*",
|
||||
]
|
||||
|
||||
EXCLUDE_DATABASE_FROM_BACKUP = [
|
||||
|
@@ -37,10 +37,6 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.loader import (
|
||||
async_get_custom_components,
|
||||
async_get_loaded_integration,
|
||||
)
|
||||
from homeassistant.util.location import async_detect_location_info
|
||||
|
||||
from .alexa_config import entity_supported as entity_supported_by_alexa
|
||||
@@ -435,79 +431,6 @@ class DownloadSupportPackageView(HomeAssistantView):
|
||||
url = "/api/cloud/support_package"
|
||||
name = "api:cloud:support_package"
|
||||
|
||||
async def _get_integration_info(self, hass: HomeAssistant) -> dict[str, Any]:
|
||||
"""Collect information about active and custom integrations."""
|
||||
# Get loaded components from hass.config.components
|
||||
loaded_components = hass.config.components.copy()
|
||||
|
||||
# Get custom integrations
|
||||
custom_domains = set()
|
||||
with suppress(Exception):
|
||||
custom_domains = set(await async_get_custom_components(hass))
|
||||
|
||||
# Separate built-in and custom integrations
|
||||
builtin_integrations = []
|
||||
custom_integrations = []
|
||||
|
||||
for domain in sorted(loaded_components):
|
||||
try:
|
||||
integration = async_get_loaded_integration(hass, domain)
|
||||
except Exception: # noqa: BLE001
|
||||
# Broad exception catch for robustness in support package
|
||||
# generation. If we can't get integration info,
|
||||
# just add the domain
|
||||
if domain in custom_domains:
|
||||
custom_integrations.append(
|
||||
{
|
||||
"domain": domain,
|
||||
"name": "Unknown",
|
||||
"version": "Unknown",
|
||||
"documentation": "Unknown",
|
||||
}
|
||||
)
|
||||
else:
|
||||
builtin_integrations.append(
|
||||
{
|
||||
"domain": domain,
|
||||
"name": "Unknown",
|
||||
}
|
||||
)
|
||||
else:
|
||||
if domain in custom_domains:
|
||||
# This is a custom integration
|
||||
# include version and documentation link
|
||||
version = (
|
||||
str(integration.version) if integration.version else "Unknown"
|
||||
)
|
||||
if not (documentation := integration.documentation):
|
||||
documentation = "Unknown"
|
||||
|
||||
custom_integrations.append(
|
||||
{
|
||||
"domain": domain,
|
||||
"name": integration.name,
|
||||
"version": version,
|
||||
"documentation": documentation,
|
||||
}
|
||||
)
|
||||
else:
|
||||
# This is a built-in integration.
|
||||
# No version needed, as it is always the same as the
|
||||
# Home Assistant version
|
||||
builtin_integrations.append(
|
||||
{
|
||||
"domain": domain,
|
||||
"name": integration.name,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"builtin_count": len(builtin_integrations),
|
||||
"builtin_integrations": builtin_integrations,
|
||||
"custom_count": len(custom_integrations),
|
||||
"custom_integrations": custom_integrations,
|
||||
}
|
||||
|
||||
async def _generate_markdown(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
@@ -530,38 +453,6 @@ class DownloadSupportPackageView(HomeAssistantView):
|
||||
markdown = "## System Information\n\n"
|
||||
markdown += get_domain_table_markdown(hass_info)
|
||||
|
||||
# Add integration information
|
||||
try:
|
||||
integration_info = await self._get_integration_info(hass)
|
||||
except Exception: # noqa: BLE001
|
||||
# Broad exception catch for robustness in support package generation
|
||||
# If there's any error getting integration info, just note it
|
||||
markdown += "## Active integrations\n\n"
|
||||
markdown += "Unable to collect integration information\n\n"
|
||||
else:
|
||||
markdown += "## Active Integrations\n\n"
|
||||
markdown += f"Built-in integrations: {integration_info['builtin_count']}\n"
|
||||
markdown += f"Custom integrations: {integration_info['custom_count']}\n\n"
|
||||
|
||||
# Built-in integrations
|
||||
if integration_info["builtin_integrations"]:
|
||||
markdown += "<details><summary>Built-in integrations</summary>\n\n"
|
||||
markdown += "Domain | Name\n"
|
||||
markdown += "--- | ---\n"
|
||||
for integration in integration_info["builtin_integrations"]:
|
||||
markdown += f"{integration['domain']} | {integration['name']}\n"
|
||||
markdown += "\n</details>\n\n"
|
||||
|
||||
# Custom integrations
|
||||
if integration_info["custom_integrations"]:
|
||||
markdown += "<details><summary>Custom integrations</summary>\n\n"
|
||||
markdown += "Domain | Name | Version | Documentation\n"
|
||||
markdown += "--- | --- | --- | ---\n"
|
||||
for integration in integration_info["custom_integrations"]:
|
||||
doc_url = integration.get("documentation") or "N/A"
|
||||
markdown += f"{integration['domain']} | {integration['name']} | {integration['version']} | {doc_url}\n"
|
||||
markdown += "\n</details>\n\n"
|
||||
|
||||
for domain, domain_info in domains_info.items():
|
||||
domain_info_md = get_domain_table_markdown(domain_info)
|
||||
markdown += (
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from abc import abstractmethod
|
||||
from datetime import timedelta
|
||||
from typing import Any, TypeVar
|
||||
from typing import TypeVar
|
||||
|
||||
from aiocomelit.api import (
|
||||
AlarmDataObject,
|
||||
@@ -13,16 +13,7 @@ from aiocomelit.api import (
|
||||
ComelitVedoAreaObject,
|
||||
ComelitVedoZoneObject,
|
||||
)
|
||||
from aiocomelit.const import (
|
||||
BRIDGE,
|
||||
CLIMATE,
|
||||
COVER,
|
||||
IRRIGATION,
|
||||
LIGHT,
|
||||
OTHER,
|
||||
SCENARIO,
|
||||
VEDO,
|
||||
)
|
||||
from aiocomelit.const import BRIDGE, VEDO
|
||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||
from aiohttp import ClientSession
|
||||
|
||||
@@ -120,32 +111,6 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
async def _async_update_system_data(self) -> T:
|
||||
"""Class method for updating data."""
|
||||
|
||||
async def _async_remove_stale_devices(
|
||||
self,
|
||||
previous_list: dict[int, Any],
|
||||
current_list: dict[int, Any],
|
||||
dev_type: str,
|
||||
) -> None:
|
||||
"""Remove stale devices."""
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
for i in previous_list:
|
||||
if i not in current_list:
|
||||
_LOGGER.debug(
|
||||
"Detected change in %s devices: index %s removed",
|
||||
dev_type,
|
||||
i,
|
||||
)
|
||||
identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}"
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, identifier)}
|
||||
)
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
||||
|
||||
class ComelitSerialBridge(
|
||||
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
|
||||
@@ -172,15 +137,7 @@ class ComelitSerialBridge(
|
||||
self,
|
||||
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
|
||||
"""Specific method for updating data."""
|
||||
data = await self.api.get_all_devices()
|
||||
|
||||
if self.data:
|
||||
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[dev_type], data[dev_type], dev_type
|
||||
)
|
||||
|
||||
return data
|
||||
return await self.api.get_all_devices()
|
||||
|
||||
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
@@ -206,14 +163,4 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
self,
|
||||
) -> AlarmDataObject:
|
||||
"""Specific method for updating data."""
|
||||
data = await self.api.get_all_areas_and_zones()
|
||||
|
||||
if self.data:
|
||||
for obj_type in ("alarm_areas", "alarm_zones"):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[obj_type],
|
||||
data[obj_type],
|
||||
"area" if obj_type == "alarm_areas" else "zone",
|
||||
)
|
||||
|
||||
return data
|
||||
return await self.api.get_all_areas_and_zones()
|
||||
|
@@ -72,7 +72,9 @@ rules:
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices: done
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: missing implementation
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
@@ -50,13 +50,14 @@ from .const import (
|
||||
ATTR_LANGUAGE,
|
||||
ATTR_TEXT,
|
||||
DATA_COMPONENT,
|
||||
DATA_DEFAULT_ENTITY,
|
||||
DOMAIN,
|
||||
HOME_ASSISTANT_AGENT,
|
||||
SERVICE_PROCESS,
|
||||
SERVICE_RELOAD,
|
||||
ConversationEntityFeature,
|
||||
)
|
||||
from .default_agent import async_setup_default_agent
|
||||
from .default_agent import DefaultAgent, async_setup_default_agent
|
||||
from .entity import ConversationEntity
|
||||
from .http import async_setup as async_setup_conversation_http
|
||||
from .models import AbstractConversationAgent, ConversationInput, ConversationResult
|
||||
@@ -141,7 +142,7 @@ def async_unset_agent(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Unset the agent to handle the conversations."""
|
||||
"""Set the agent to handle the conversations."""
|
||||
get_agent_manager(hass).async_unset_agent(config_entry.entry_id)
|
||||
|
||||
|
||||
@@ -240,10 +241,10 @@ async def async_handle_sentence_triggers(
|
||||
|
||||
Returns None if no match occurred.
|
||||
"""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
default_agent = async_get_agent(hass)
|
||||
assert isinstance(default_agent, DefaultAgent)
|
||||
|
||||
return await agent.async_handle_sentence_triggers(user_input)
|
||||
return await default_agent.async_handle_sentence_triggers(user_input)
|
||||
|
||||
|
||||
async def async_handle_intents(
|
||||
@@ -256,10 +257,12 @@ async def async_handle_intents(
|
||||
|
||||
Returns None if no match occurred.
|
||||
"""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
default_agent = async_get_agent(hass)
|
||||
assert isinstance(default_agent, DefaultAgent)
|
||||
|
||||
return await agent.async_handle_intents(user_input, intent_filter=intent_filter)
|
||||
return await default_agent.async_handle_intents(
|
||||
user_input, intent_filter=intent_filter
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
@@ -295,9 +298,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def handle_reload(service: ServiceCall) -> None:
|
||||
"""Reload intents."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
if agent is not None:
|
||||
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
|
||||
await hass.data[DATA_DEFAULT_ENTITY].async_reload(
|
||||
language=service.data.get(ATTR_LANGUAGE)
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.core import Context, HomeAssistant, async_get_hass, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, intent, singleton
|
||||
|
||||
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
|
||||
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY, HOME_ASSISTANT_AGENT
|
||||
from .entity import ConversationEntity
|
||||
from .models import (
|
||||
AbstractConversationAgent,
|
||||
@@ -28,9 +28,6 @@ from .trace import (
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .default_agent import DefaultAgent
|
||||
|
||||
|
||||
@singleton.singleton("conversation_agent")
|
||||
@callback
|
||||
@@ -52,10 +49,8 @@ def async_get_agent(
|
||||
hass: HomeAssistant, agent_id: str | None = None
|
||||
) -> AbstractConversationAgent | ConversationEntity | None:
|
||||
"""Get specified agent."""
|
||||
manager = get_agent_manager(hass)
|
||||
|
||||
if agent_id is None or agent_id == HOME_ASSISTANT_AGENT:
|
||||
return manager.default_agent
|
||||
return hass.data[DATA_DEFAULT_ENTITY]
|
||||
|
||||
if "." in agent_id:
|
||||
return hass.data[DATA_COMPONENT].get_entity(agent_id)
|
||||
@@ -139,7 +134,6 @@ class AgentManager:
|
||||
"""Initialize the conversation agents."""
|
||||
self.hass = hass
|
||||
self._agents: dict[str, AbstractConversationAgent] = {}
|
||||
self.default_agent: DefaultAgent | None = None
|
||||
|
||||
@callback
|
||||
def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
|
||||
@@ -188,7 +182,3 @@ class AgentManager:
|
||||
def async_unset_agent(self, agent_id: str) -> None:
|
||||
"""Unset the agent."""
|
||||
self._agents.pop(agent_id, None)
|
||||
|
||||
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
|
||||
"""Set up the default agent."""
|
||||
self.default_agent = agent
|
||||
|
@@ -10,9 +10,11 @@ from homeassistant.util.hass_dict import HassKey
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
|
||||
from .default_agent import DefaultAgent
|
||||
from .entity import ConversationEntity
|
||||
|
||||
DOMAIN = "conversation"
|
||||
DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
|
||||
HOME_ASSISTANT_AGENT = "conversation.home_assistant"
|
||||
|
||||
ATTR_TEXT = "text"
|
||||
@@ -24,6 +26,7 @@ SERVICE_PROCESS = "process"
|
||||
SERVICE_RELOAD = "reload"
|
||||
|
||||
DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN)
|
||||
DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey(f"{DOMAIN}_default_entity")
|
||||
|
||||
|
||||
class ConversationEntityFeature(IntFlag):
|
||||
|
@@ -68,9 +68,13 @@ from homeassistant.helpers.event import async_track_state_added_domain
|
||||
from homeassistant.util import language as language_util
|
||||
from homeassistant.util.json import JsonObjectType, json_loads_object
|
||||
|
||||
from .agent_manager import get_agent_manager
|
||||
from .chat_log import AssistantContent, ChatLog
|
||||
from .const import DOMAIN, ConversationEntityFeature
|
||||
from .const import (
|
||||
DATA_DEFAULT_ENTITY,
|
||||
DEFAULT_EXPOSED_ATTRIBUTES,
|
||||
DOMAIN,
|
||||
ConversationEntityFeature,
|
||||
)
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput, ConversationResult
|
||||
from .trace import ConversationTraceEventType, async_conversation_trace_append
|
||||
@@ -79,8 +83,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
|
||||
_ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
|
||||
|
||||
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
|
||||
|
||||
REGEX_TYPE = type(re.compile(""))
|
||||
TRIGGER_CALLBACK_TYPE = Callable[
|
||||
[ConversationInput, RecognizeResult], Awaitable[str | None]
|
||||
@@ -207,9 +209,9 @@ async def async_setup_default_agent(
|
||||
config_intents: dict[str, Any],
|
||||
) -> None:
|
||||
"""Set up entity registry listener for the default agent."""
|
||||
agent = DefaultAgent(hass, config_intents)
|
||||
await entity_component.async_add_entities([agent])
|
||||
await get_agent_manager(hass).async_setup_default_agent(agent)
|
||||
entity = DefaultAgent(hass, config_intents)
|
||||
await entity_component.async_add_entities([entity])
|
||||
hass.data[DATA_DEFAULT_ENTITY] = entity
|
||||
|
||||
@core.callback
|
||||
def async_entity_state_listener(
|
||||
@@ -844,7 +846,7 @@ class DefaultAgent(ConversationEntity):
|
||||
context = {"domain": state.domain}
|
||||
if state.attributes:
|
||||
# Include some attributes
|
||||
for attr in _DEFAULT_EXPOSED_ATTRIBUTES:
|
||||
for attr in DEFAULT_EXPOSED_ATTRIBUTES:
|
||||
if attr not in state.attributes:
|
||||
continue
|
||||
context[attr] = state.attributes[attr]
|
||||
|
@@ -25,7 +25,7 @@ from .agent_manager import (
|
||||
async_get_agent,
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
|
||||
from .default_agent import (
|
||||
METADATA_CUSTOM_FILE,
|
||||
METADATA_CUSTOM_SENTENCE,
|
||||
@@ -169,8 +169,7 @@ async def websocket_list_sentences(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|
||||
) -> None:
|
||||
"""List custom registered sentences."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
agent = hass.data[DATA_DEFAULT_ENTITY]
|
||||
|
||||
sentences = []
|
||||
for trigger_data in agent.trigger_sentences:
|
||||
@@ -192,8 +191,7 @@ async def websocket_hass_agent_debug(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|
||||
) -> None:
|
||||
"""Return intents that would be matched by the default agent for a list of sentences."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
agent = hass.data[DATA_DEFAULT_ENTITY]
|
||||
|
||||
# Return results for each sentence in the same order as the input.
|
||||
result_dicts: list[dict[str, Any] | None] = []
|
||||
|
@@ -20,8 +20,7 @@ from homeassistant.helpers.script import ScriptRunResult
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import UNDEFINED, ConfigType
|
||||
|
||||
from .agent_manager import get_agent_manager
|
||||
from .const import DOMAIN
|
||||
from .const import DATA_DEFAULT_ENTITY, DOMAIN
|
||||
from .models import ConversationInput
|
||||
|
||||
|
||||
@@ -124,6 +123,4 @@ async def async_attach_trigger(
|
||||
# two trigger copies for who will provide a response.
|
||||
return None
|
||||
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
return agent.register_trigger(sentences, call_action)
|
||||
return hass.data[DATA_DEFAULT_ENTITY].register_trigger(sentences, call_action)
|
||||
|
@@ -1,23 +0,0 @@
|
||||
"""Diagnostics support for derivative."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
registry = er.async_get(hass)
|
||||
entities = registry.entities.get_entries_for_config_entry_id(config_entry.entry_id)
|
||||
|
||||
return {
|
||||
"config_entry": config_entry.as_dict(),
|
||||
"entity": [entity.extended_dict for entity in entities],
|
||||
}
|
@@ -227,28 +227,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
weight = calculate_weight(start, end, current_time)
|
||||
derivative = derivative + (value * Decimal(weight))
|
||||
|
||||
_LOGGER.debug(
|
||||
"%s: Calculated new derivative as %f from %d segments",
|
||||
self.entity_id,
|
||||
derivative,
|
||||
len(self._state_list),
|
||||
)
|
||||
|
||||
return derivative
|
||||
|
||||
def _prune_state_list(self, current_time: datetime) -> None:
|
||||
# filter out all derivatives older than `time_window` from our window list
|
||||
old_len = len(self._state_list)
|
||||
self._state_list = [
|
||||
(time_start, time_end, state)
|
||||
for time_start, time_end, state in self._state_list
|
||||
if (current_time - time_end).total_seconds() < self._time_window
|
||||
]
|
||||
_LOGGER.debug(
|
||||
"%s: Pruned %d elements from state list",
|
||||
self.entity_id,
|
||||
old_len - len(self._state_list),
|
||||
)
|
||||
|
||||
def _handle_invalid_source_state(self, state: State | None) -> bool:
|
||||
# Check the source state for unknown/unavailable condition. If unusable, write unknown/unavailable state and return false.
|
||||
@@ -305,10 +292,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
) -> None:
|
||||
"""Calculate derivative based on time and reschedule."""
|
||||
|
||||
_LOGGER.debug(
|
||||
"%s: Recalculating derivative due to max_sub_interval time elapsed",
|
||||
self.entity_id,
|
||||
)
|
||||
self._prune_state_list(now)
|
||||
derivative = self._calc_derivative_from_state_list(now)
|
||||
self._write_native_value(derivative)
|
||||
@@ -317,11 +300,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
if derivative != 0:
|
||||
schedule_max_sub_interval_exceeded(source_state)
|
||||
|
||||
_LOGGER.debug(
|
||||
"%s: Scheduling max_sub_interval_callback in %s",
|
||||
self.entity_id,
|
||||
self._max_sub_interval,
|
||||
)
|
||||
self._cancel_max_sub_interval_exceeded_callback = async_call_later(
|
||||
self.hass,
|
||||
self._max_sub_interval,
|
||||
@@ -331,9 +309,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
@callback
|
||||
def on_state_reported(event: Event[EventStateReportedData]) -> None:
|
||||
"""Handle constant sensor state."""
|
||||
_LOGGER.debug(
|
||||
"%s: New state reported event: %s", self.entity_id, event.data
|
||||
)
|
||||
self._cancel_max_sub_interval_exceeded_callback()
|
||||
new_state = event.data["new_state"]
|
||||
if not self._handle_invalid_source_state(new_state):
|
||||
@@ -355,7 +330,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
@callback
|
||||
def on_state_changed(event: Event[EventStateChangedData]) -> None:
|
||||
"""Handle changed sensor state."""
|
||||
_LOGGER.debug("%s: New state changed event: %s", self.entity_id, event.data)
|
||||
self._cancel_max_sub_interval_exceeded_callback()
|
||||
new_state = event.data["new_state"]
|
||||
if not self._handle_invalid_source_state(new_state):
|
||||
@@ -408,32 +382,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
/ Decimal(self._unit_prefix)
|
||||
* Decimal(self._unit_time)
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"%s: Calculated new derivative segment as %f / %f / %f * %f = %f",
|
||||
self.entity_id,
|
||||
delta_value,
|
||||
elapsed_time,
|
||||
self._unit_prefix,
|
||||
self._unit_time,
|
||||
new_derivative,
|
||||
)
|
||||
|
||||
except ValueError as err:
|
||||
_LOGGER.warning(
|
||||
"%s: While calculating derivative: %s", self.entity_id, err
|
||||
)
|
||||
_LOGGER.warning("While calculating derivative: %s", err)
|
||||
except DecimalException as err:
|
||||
_LOGGER.warning(
|
||||
"%s: Invalid state (%s > %s): %s",
|
||||
self.entity_id,
|
||||
old_value,
|
||||
new_state.state,
|
||||
err,
|
||||
"Invalid state (%s > %s): %s", old_value, new_state.state, err
|
||||
)
|
||||
except AssertionError as err:
|
||||
_LOGGER.error(
|
||||
"%s: Could not calculate derivative: %s", self.entity_id, err
|
||||
)
|
||||
_LOGGER.error("Could not calculate derivative: %s", err)
|
||||
|
||||
# For total inreasing sensors, the value is expected to continuously increase.
|
||||
# A negative derivative for a total increasing sensor likely indicates the
|
||||
@@ -443,10 +400,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
== SensorStateClass.TOTAL_INCREASING
|
||||
and new_derivative < 0
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"%s: Dropping sample as source total_increasing sensor decreased",
|
||||
self.entity_id,
|
||||
)
|
||||
return
|
||||
|
||||
# add latest derivative to the window list
|
||||
|
@@ -234,17 +234,6 @@ ECOWITT_SENSORS_MAPPING: Final = {
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
EcoWittSensorTypes.DISTANCE_MM: SensorEntityDescription(
|
||||
key="DISTANCE_MM",
|
||||
device_class=SensorDeviceClass.DISTANCE,
|
||||
native_unit_of_measurement=UnitOfLength.MILLIMETERS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
EcoWittSensorTypes.HEAT_COUNT: SensorEntityDescription(
|
||||
key="HEAT_COUNT",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
EcoWittSensorTypes.PM1: SensorEntityDescription(
|
||||
key="PM1",
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
|
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==41.1.0",
|
||||
"aioesphomeapi==41.0.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.3.0"
|
||||
],
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["google-genai==1.38.0"]
|
||||
"requirements": ["google-genai==1.29.0"]
|
||||
}
|
||||
|
@@ -193,7 +193,7 @@
|
||||
},
|
||||
"unsupported_docker_version": {
|
||||
"title": "Unsupported system - Docker version",
|
||||
"description": "System is unsupported because the Docker version is out of date. For information about the required version and how to fix this, select Learn more."
|
||||
"description": "System is unsupported because the wrong version of Docker is in use. Use the link to learn the correct version and how to fix this."
|
||||
},
|
||||
"unsupported_job_conditions": {
|
||||
"title": "Unsupported system - Protections disabled",
|
||||
@@ -209,7 +209,7 @@
|
||||
},
|
||||
"unsupported_os": {
|
||||
"title": "Unsupported system - Operating System",
|
||||
"description": "System is unsupported because the operating system in use is not tested or maintained for use with Supervisor. For information about supported operating systems and how to fix this, select Learn more."
|
||||
"description": "System is unsupported because the operating system in use is not tested or maintained for use with Supervisor. Use the link to which operating systems are supported and how to fix this."
|
||||
},
|
||||
"unsupported_os_agent": {
|
||||
"title": "Unsupported system - OS-Agent issues",
|
||||
|
@@ -1,23 +0,0 @@
|
||||
"""Diagnostics support for history_stats."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
registry = er.async_get(hass)
|
||||
entities = registry.entities.get_entries_for_config_entry_id(config_entry.entry_id)
|
||||
|
||||
return {
|
||||
"config_entry": config_entry.as_dict(),
|
||||
"entity": [entity.extended_dict for entity in entities],
|
||||
}
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.81", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.80", "babel==2.15.0"]
|
||||
}
|
||||
|
@@ -103,7 +103,6 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Initialize the config flow."""
|
||||
|
@@ -52,16 +52,8 @@
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -83,29 +75,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@@ -142,15 +111,7 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -172,29 +133,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
import asyncio
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -24,7 +23,6 @@ from homeassistant.config_entries import (
|
||||
ConfigEntryBaseFlow,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
FlowType,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
@@ -50,31 +48,13 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_PICK_FIRMWARE_THREAD = "pick_firmware_thread"
|
||||
STEP_PICK_FIRMWARE_ZIGBEE = "pick_firmware_zigbee"
|
||||
STEP_PICK_FIRMWARE_THREAD_MIGRATE = "pick_firmware_thread_migrate"
|
||||
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE = "pick_firmware_zigbee_migrate"
|
||||
|
||||
|
||||
class PickedFirmwareType(StrEnum):
|
||||
"""Firmware types that can be picked."""
|
||||
|
||||
THREAD = "thread"
|
||||
ZIGBEE = "zigbee"
|
||||
|
||||
|
||||
class ZigbeeIntegration(StrEnum):
|
||||
"""Zigbee integrations that can be picked."""
|
||||
|
||||
OTHER = "other"
|
||||
ZHA = "zha"
|
||||
|
||||
|
||||
class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
"""Base flow to install firmware."""
|
||||
|
||||
ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
|
||||
_failed_addon_name: str
|
||||
_failed_addon_reason: str
|
||||
_picked_firmware_type: PickedFirmwareType
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Instantiate base flow."""
|
||||
@@ -83,7 +63,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self._probed_firmware_info: FirmwareInfo | None = None
|
||||
self._device: str | None = None # To be set in a subclass
|
||||
self._hardware_name: str = "unknown" # To be set in a subclass
|
||||
self._zigbee_integration = ZigbeeIntegration.ZHA
|
||||
|
||||
self.addon_install_task: asyncio.Task | None = None
|
||||
self.addon_start_task: asyncio.Task | None = None
|
||||
@@ -126,23 +105,11 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Thread or Zigbee firmware."""
|
||||
# Determine if ZHA or Thread are already configured to present migrate options
|
||||
zha_entries = self.hass.config_entries.async_entries(ZHA_DOMAIN)
|
||||
otbr_entries = self.hass.config_entries.async_entries(OTBR_DOMAIN)
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="pick_firmware",
|
||||
menu_options=[
|
||||
(
|
||||
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE
|
||||
if zha_entries
|
||||
else STEP_PICK_FIRMWARE_ZIGBEE
|
||||
),
|
||||
(
|
||||
STEP_PICK_FIRMWARE_THREAD_MIGRATE
|
||||
if otbr_entries
|
||||
else STEP_PICK_FIRMWARE_THREAD
|
||||
),
|
||||
STEP_PICK_FIRMWARE_ZIGBEE,
|
||||
STEP_PICK_FIRMWARE_THREAD,
|
||||
],
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
@@ -288,45 +255,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
return self.async_show_progress_done(next_step_id=next_step_id)
|
||||
|
||||
async def _configure_and_start_otbr_addon(self) -> None:
|
||||
"""Configure and start the OTBR addon."""
|
||||
|
||||
# Before we start the addon, confirm that the correct firmware is running
|
||||
# and populate `self._probed_firmware_info` with the correct information
|
||||
if not await self._probe_firmware_info(probe_methods=(ApplicationType.SPINEL,)):
|
||||
raise AbortFlow(
|
||||
"unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
assert self._device is not None
|
||||
new_addon_config = {
|
||||
**addon_info.options,
|
||||
"device": self._device,
|
||||
"baudrate": 460800,
|
||||
"flow_control": True,
|
||||
"autoflash_firmware": False,
|
||||
}
|
||||
|
||||
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
|
||||
|
||||
try:
|
||||
await otbr_manager.async_set_addon_options(new_addon_config)
|
||||
except AddonError as err:
|
||||
_LOGGER.error(err)
|
||||
raise AbortFlow(
|
||||
"addon_set_config_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
) from err
|
||||
|
||||
await otbr_manager.async_start_addon_waiting()
|
||||
|
||||
async def async_step_firmware_download_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -353,85 +281,17 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_zigbee_installation_type(
|
||||
async def async_step_pick_firmware_zigbee(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the installation type step."""
|
||||
return self.async_show_menu(
|
||||
step_id="zigbee_installation_type",
|
||||
menu_options=[
|
||||
"zigbee_intent_recommended",
|
||||
"zigbee_intent_custom",
|
||||
],
|
||||
)
|
||||
|
||||
async def async_step_zigbee_intent_recommended(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Select recommended installation type."""
|
||||
self._zigbee_integration = ZigbeeIntegration.ZHA
|
||||
return await self._async_continue_picked_firmware()
|
||||
|
||||
async def async_step_zigbee_intent_custom(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Select custom installation type."""
|
||||
return await self.async_step_zigbee_integration()
|
||||
|
||||
async def async_step_zigbee_integration(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Select Zigbee integration."""
|
||||
return self.async_show_menu(
|
||||
step_id="zigbee_integration",
|
||||
menu_options=[
|
||||
"zigbee_integration_zha",
|
||||
"zigbee_integration_other",
|
||||
],
|
||||
)
|
||||
|
||||
async def async_step_zigbee_integration_zha(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Select ZHA integration."""
|
||||
self._zigbee_integration = ZigbeeIntegration.ZHA
|
||||
return await self._async_continue_picked_firmware()
|
||||
|
||||
async def async_step_zigbee_integration_other(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Select other Zigbee integration."""
|
||||
self._zigbee_integration = ZigbeeIntegration.OTHER
|
||||
return await self._async_continue_picked_firmware()
|
||||
|
||||
async def _async_continue_picked_firmware(self) -> ConfigFlowResult:
|
||||
"""Continue to the picked firmware step."""
|
||||
"""Pick Zigbee firmware."""
|
||||
if not await self._probe_firmware_info():
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
if self._picked_firmware_type == PickedFirmwareType.ZIGBEE:
|
||||
return await self.async_step_install_zigbee_firmware()
|
||||
|
||||
if result := await self._ensure_thread_addon_setup():
|
||||
return result
|
||||
|
||||
return await self.async_step_install_thread_firmware()
|
||||
|
||||
async def async_step_pick_firmware_zigbee(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Zigbee firmware."""
|
||||
self._picked_firmware_type = PickedFirmwareType.ZIGBEE
|
||||
return await self.async_step_zigbee_installation_type()
|
||||
|
||||
async def async_step_pick_firmware_zigbee_migrate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Zigbee firmware. Migration is automatic."""
|
||||
return await self.async_step_pick_firmware_zigbee()
|
||||
return await self.async_step_install_zigbee_firmware()
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -457,43 +317,42 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
"""Pre-confirm Zigbee setup."""
|
||||
|
||||
# This step is necessary to prevent `user_input` from being passed through
|
||||
return await self.async_step_continue_zigbee()
|
||||
return await self.async_step_confirm_zigbee()
|
||||
|
||||
async def async_step_continue_zigbee(
|
||||
async def async_step_confirm_zigbee(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Continue Zigbee setup."""
|
||||
"""Confirm Zigbee setup."""
|
||||
assert self._device is not None
|
||||
assert self._hardware_name is not None
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="confirm_zigbee",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
if not await self._probe_firmware_info(probe_methods=(ApplicationType.EZSP,)):
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
if self._zigbee_integration == ZigbeeIntegration.OTHER:
|
||||
return self._async_flow_finished()
|
||||
|
||||
result = await self.hass.config_entries.flow.async_init(
|
||||
await self.hass.config_entries.flow.async_init(
|
||||
ZHA_DOMAIN,
|
||||
context={"source": "hardware"},
|
||||
data={
|
||||
"name": self._hardware_name,
|
||||
"port": {
|
||||
"path": self._device,
|
||||
"baudrate": self.ZIGBEE_BAUDRATE,
|
||||
"baudrate": 115200,
|
||||
"flow_control": "hardware",
|
||||
},
|
||||
"radio_type": "ezsp",
|
||||
},
|
||||
)
|
||||
return self._continue_zha_flow(result)
|
||||
|
||||
@callback
|
||||
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
|
||||
"""Continue the ZHA flow."""
|
||||
raise NotImplementedError
|
||||
return self._async_flow_finished()
|
||||
|
||||
async def _ensure_thread_addon_setup(self) -> ConfigFlowResult | None:
|
||||
"""Ensure the OTBR addon is set up and not running."""
|
||||
@@ -512,7 +371,18 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
return await self.async_step_install_otbr_addon()
|
||||
|
||||
if addon_info.state == AddonState.RUNNING:
|
||||
# Stop the addon before continuing to flash firmware
|
||||
# We only fail setup if we have an instance of OTBR running *and* it's
|
||||
# pointing to different hardware
|
||||
if addon_info.options["device"] != self._device:
|
||||
return self.async_abort(
|
||||
reason="otbr_addon_already_running",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
)
|
||||
|
||||
# Otherwise, stop the addon before continuing to flash firmware
|
||||
await otbr_manager.async_stop_addon()
|
||||
|
||||
return None
|
||||
@@ -521,14 +391,16 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Thread firmware."""
|
||||
self._picked_firmware_type = PickedFirmwareType.THREAD
|
||||
return await self._async_continue_picked_firmware()
|
||||
if not await self._probe_firmware_info():
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
async def async_step_pick_firmware_thread_migrate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Thread firmware. Migration is automatic."""
|
||||
return await self.async_step_pick_firmware_thread()
|
||||
if result := await self._ensure_thread_addon_setup():
|
||||
return result
|
||||
|
||||
return await self.async_step_install_thread_firmware()
|
||||
|
||||
async def async_step_install_thread_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -581,8 +453,43 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
|
||||
if not self.addon_start_task:
|
||||
# Before we start the addon, confirm that the correct firmware is running
|
||||
# and populate `self._probed_firmware_info` with the correct information
|
||||
if not await self._probe_firmware_info(
|
||||
probe_methods=(ApplicationType.SPINEL,)
|
||||
):
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
assert self._device is not None
|
||||
new_addon_config = {
|
||||
**addon_info.options,
|
||||
"device": self._device,
|
||||
"baudrate": 460800,
|
||||
"flow_control": True,
|
||||
"autoflash_firmware": False,
|
||||
}
|
||||
|
||||
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
|
||||
|
||||
try:
|
||||
await otbr_manager.async_set_addon_options(new_addon_config)
|
||||
except AddonError as err:
|
||||
_LOGGER.error(err)
|
||||
raise AbortFlow(
|
||||
"addon_set_config_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
) from err
|
||||
|
||||
self.addon_start_task = self.hass.async_create_task(
|
||||
self._configure_and_start_otbr_addon()
|
||||
otbr_manager.async_start_addon_waiting()
|
||||
)
|
||||
|
||||
if not self.addon_start_task.done():
|
||||
@@ -601,9 +508,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
except (AddonError, AbortFlow) as err:
|
||||
_LOGGER.error(err)
|
||||
self._failed_addon_name = otbr_manager.addon_name
|
||||
self._failed_addon_reason = (
|
||||
err.reason if isinstance(err, AbortFlow) else "addon_start_failed"
|
||||
)
|
||||
self._failed_addon_reason = "addon_start_failed"
|
||||
return self.async_show_progress_done(next_step_id="addon_operation_failed")
|
||||
finally:
|
||||
self.addon_start_task = None
|
||||
@@ -667,21 +572,6 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow):
|
||||
|
||||
return await self.async_step_pick_firmware()
|
||||
|
||||
@callback
|
||||
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
|
||||
"""Continue the ZHA flow."""
|
||||
next_flow_id = zha_result["flow_id"]
|
||||
|
||||
result = self._async_flow_finished()
|
||||
return (
|
||||
self.async_create_entry(
|
||||
title=result["title"] or self._hardware_name,
|
||||
data=result["data"],
|
||||
next_flow=(FlowType.CONFIG_FLOW, next_flow_id),
|
||||
)
|
||||
| result # update all items with the child result
|
||||
)
|
||||
|
||||
|
||||
class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow):
|
||||
"""Zigbee and Thread options flow handlers."""
|
||||
@@ -739,10 +629,3 @@ class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow):
|
||||
)
|
||||
|
||||
return await super().async_step_pick_firmware_thread(user_input)
|
||||
|
||||
@callback
|
||||
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
|
||||
"""Continue the ZHA flow."""
|
||||
# The options flow cannot return a next_flow yet, so we just finish here.
|
||||
# The options flow should be changed to a reconfigure flow.
|
||||
return self._async_flow_finished()
|
||||
|
@@ -3,19 +3,11 @@
|
||||
"options": {
|
||||
"step": {
|
||||
"pick_firmware": {
|
||||
"title": "Pick your protocol",
|
||||
"description": "You can use your {model} for a Zigbee or Thread network. Please check what type of devices you want to add to Home Assistant. You can always change this later.",
|
||||
"title": "Pick your firmware",
|
||||
"description": "Let's get started with setting up your {model}. Do you want to use it to set up a Zigbee or Thread network?",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "Use as Zigbee adapter",
|
||||
"pick_firmware_thread": "Use as Thread adapter",
|
||||
"pick_firmware_zigbee_migrate": "Migrate Zigbee to a new adapter",
|
||||
"pick_firmware_thread_migrate": "Migrate Thread to a new adapter"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "Most common protocol.",
|
||||
"pick_firmware_thread": "Often used for Matter over Thread devices.",
|
||||
"pick_firmware_zigbee_migrate": "This will move your Zigbee network to the new adapter.",
|
||||
"pick_firmware_thread_migrate": "This will migrate your Thread Border Router to the new adapter."
|
||||
"pick_firmware_zigbee": "Zigbee",
|
||||
"pick_firmware_thread": "Thread"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -37,29 +29,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "OpenThread Border Router setup complete",
|
||||
"description": "Your {model} is now an OpenThread Border Router and will show up in the Thread integration."
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "Set up Zigbee",
|
||||
"description": "Choose the installation type for the Zigbee adapter.",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "Recommended installation",
|
||||
"zigbee_intent_custom": "Custom"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "Automatically install and configure Zigbee.",
|
||||
"zigbee_intent_custom": "Manually install and configure Zigbee, for example with Zigbee2MQTT."
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "Select Zigbee method",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "Zigbee Home Automation",
|
||||
"zigbee_integration_other": "Other"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "Lets Home Assistant control a Zigbee network.",
|
||||
"zigbee_integration_other": "For example if you want to use the adapter with Zigbee2MQTT."
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
@@ -52,16 +52,8 @@
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -83,29 +75,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@@ -142,15 +111,7 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -172,29 +133,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
@@ -92,7 +92,7 @@ class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
firmware_name="Zigbee",
|
||||
expected_installed_firmware_type=ApplicationType.EZSP,
|
||||
step_id="install_zigbee_firmware",
|
||||
next_step_id="pre_confirm_zigbee",
|
||||
next_step_id="confirm_zigbee",
|
||||
)
|
||||
|
||||
async def async_step_install_thread_firmware(
|
||||
|
@@ -75,16 +75,8 @@
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -106,29 +98,6 @@
|
||||
"confirm_otbr": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
|
||||
},
|
||||
"zigbee_installation_type": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
|
||||
"menu_options": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
|
||||
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
|
||||
}
|
||||
},
|
||||
"zigbee_integration": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
|
||||
"menu_options": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
|
||||
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
|
@@ -26,8 +26,8 @@
|
||||
}
|
||||
},
|
||||
"verification_code": {
|
||||
"title": "Apple Account code",
|
||||
"description": "Please enter the verification code you just received from Apple",
|
||||
"title": "iCloud verification code",
|
||||
"description": "Please enter the verification code you just received from iCloud",
|
||||
"data": {
|
||||
"verification_code": "Verification code"
|
||||
}
|
||||
@@ -47,11 +47,11 @@
|
||||
"services": {
|
||||
"update": {
|
||||
"name": "Update",
|
||||
"description": "Asks for a state update of all devices linked to an Apple Account.",
|
||||
"description": "Asks for a state update of all devices linked to an iCloud account.",
|
||||
"fields": {
|
||||
"account": {
|
||||
"name": "Account",
|
||||
"description": "Your Apple Account username (email)."
|
||||
"description": "Your iCloud account username (email) or account name."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -105,20 +105,6 @@ async def _async_get_image(image_entity: ImageEntity, timeout: int) -> Image:
|
||||
raise HomeAssistantError("Unable to get image")
|
||||
|
||||
|
||||
async def async_get_image(
|
||||
hass: HomeAssistant,
|
||||
entity_id: str,
|
||||
timeout: int = 10,
|
||||
) -> Image:
|
||||
"""Fetch an image from an image entity."""
|
||||
component = hass.data[DATA_COMPONENT]
|
||||
|
||||
if (image := component.get_entity(entity_id)) is None:
|
||||
raise HomeAssistantError(f"Image entity {entity_id} not found")
|
||||
|
||||
return await _async_get_image(image, timeout)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the image component."""
|
||||
component = hass.data[DATA_COMPONENT] = EntityComponent[ImageEntity](
|
||||
|
@@ -118,31 +118,27 @@ COVER_KNX_SCHEMA = AllSerializeFirst(
|
||||
vol.Schema(
|
||||
{
|
||||
"section_binary_control": KNXSectionFlat(),
|
||||
vol.Optional(CONF_GA_UP_DOWN): GASelector(state=False, valid_dpt="1"),
|
||||
vol.Optional(CONF_GA_UP_DOWN): GASelector(state=False),
|
||||
vol.Optional(CoverConf.INVERT_UPDOWN): selector.BooleanSelector(),
|
||||
"section_stop_control": KNXSectionFlat(),
|
||||
vol.Optional(CONF_GA_STOP): GASelector(state=False, valid_dpt="1"),
|
||||
vol.Optional(CONF_GA_STEP): GASelector(state=False, valid_dpt="1"),
|
||||
vol.Optional(CONF_GA_STOP): GASelector(state=False),
|
||||
vol.Optional(CONF_GA_STEP): GASelector(state=False),
|
||||
"section_position_control": KNXSectionFlat(collapsible=True),
|
||||
vol.Optional(CONF_GA_POSITION_SET): GASelector(
|
||||
state=False, valid_dpt="5.001"
|
||||
),
|
||||
vol.Optional(CONF_GA_POSITION_STATE): GASelector(
|
||||
write=False, valid_dpt="5.001"
|
||||
),
|
||||
vol.Optional(CONF_GA_POSITION_SET): GASelector(state=False),
|
||||
vol.Optional(CONF_GA_POSITION_STATE): GASelector(write=False),
|
||||
vol.Optional(CoverConf.INVERT_POSITION): selector.BooleanSelector(),
|
||||
"section_tilt_control": KNXSectionFlat(collapsible=True),
|
||||
vol.Optional(CONF_GA_ANGLE): GASelector(valid_dpt="5.001"),
|
||||
vol.Optional(CONF_GA_ANGLE): GASelector(),
|
||||
vol.Optional(CoverConf.INVERT_ANGLE): selector.BooleanSelector(),
|
||||
"section_travel_time": KNXSectionFlat(),
|
||||
vol.Required(
|
||||
vol.Optional(
|
||||
CoverConf.TRAVELLING_TIME_UP, default=25
|
||||
): selector.NumberSelector(
|
||||
selector.NumberSelectorConfig(
|
||||
min=0, max=1000, step=0.1, unit_of_measurement="s"
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
vol.Optional(
|
||||
CoverConf.TRAVELLING_TIME_DOWN, default=25
|
||||
): selector.NumberSelector(
|
||||
selector.NumberSelectorConfig(
|
||||
@@ -314,7 +310,7 @@ LIGHT_KNX_SCHEMA = AllSerializeFirst(
|
||||
SWITCH_KNX_SCHEMA = vol.Schema(
|
||||
{
|
||||
"section_switch": KNXSectionFlat(),
|
||||
vol.Required(CONF_GA_SWITCH): GASelector(write_required=True, valid_dpt="1"),
|
||||
vol.Required(CONF_GA_SWITCH): GASelector(write_required=True),
|
||||
vol.Optional(CONF_INVERT, default=False): selector.BooleanSelector(),
|
||||
vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
|
||||
vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
|
||||
|
@@ -209,11 +209,5 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_entity": {
|
||||
"title": "{name} is deprecated",
|
||||
"description": "The Litter-Robot entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your dashboards, automations and scripts, disable `{entity}` and reload the integration/restart Home Assistant to fix this issue."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -6,24 +6,13 @@ from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Generic
|
||||
|
||||
from pylitterbot import FeederRobot, LitterRobot, LitterRobot3, LitterRobot4, Robot
|
||||
from pylitterbot import FeederRobot, LitterRobot, Robot
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
DOMAIN as SWITCH_DOMAIN,
|
||||
SwitchEntity,
|
||||
SwitchEntityDescription,
|
||||
)
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
IssueSeverity,
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LitterRobotConfigEntry
|
||||
from .entity import LitterRobotEntity, _WhiskerEntityT
|
||||
|
||||
@@ -37,15 +26,6 @@ class RobotSwitchEntityDescription(SwitchEntityDescription, Generic[_WhiskerEnti
|
||||
value_fn: Callable[[_WhiskerEntityT], bool]
|
||||
|
||||
|
||||
NIGHT_LIGHT_MODE_ENTITY_DESCRIPTION = RobotSwitchEntityDescription[
|
||||
LitterRobot | FeederRobot
|
||||
](
|
||||
key="night_light_mode_enabled",
|
||||
translation_key="night_light_mode",
|
||||
set_fn=lambda robot, value: robot.set_night_light(value),
|
||||
value_fn=lambda robot: robot.night_light_mode_enabled,
|
||||
)
|
||||
|
||||
SWITCH_MAP: dict[type[Robot], tuple[RobotSwitchEntityDescription, ...]] = {
|
||||
FeederRobot: (
|
||||
RobotSwitchEntityDescription[FeederRobot](
|
||||
@@ -54,10 +34,14 @@ SWITCH_MAP: dict[type[Robot], tuple[RobotSwitchEntityDescription, ...]] = {
|
||||
set_fn=lambda robot, value: robot.set_gravity_mode(value),
|
||||
value_fn=lambda robot: robot.gravity_mode_enabled,
|
||||
),
|
||||
NIGHT_LIGHT_MODE_ENTITY_DESCRIPTION,
|
||||
),
|
||||
LitterRobot3: (NIGHT_LIGHT_MODE_ENTITY_DESCRIPTION,),
|
||||
Robot: ( # type: ignore[type-abstract] # only used for isinstance check
|
||||
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
|
||||
key="night_light_mode_enabled",
|
||||
translation_key="night_light_mode",
|
||||
set_fn=lambda robot, value: robot.set_night_light(value),
|
||||
value_fn=lambda robot: robot.night_light_mode_enabled,
|
||||
),
|
||||
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
|
||||
key="panel_lock_enabled",
|
||||
translation_key="panel_lockout",
|
||||
@@ -75,54 +59,13 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Litter-Robot switches using config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
entities = [
|
||||
async_add_entities(
|
||||
RobotSwitchEntity(robot=robot, coordinator=coordinator, description=description)
|
||||
for robot in coordinator.account.robots
|
||||
for robot_type, entity_descriptions in SWITCH_MAP.items()
|
||||
if isinstance(robot, robot_type)
|
||||
for description in entity_descriptions
|
||||
]
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
def add_deprecated_entity(
|
||||
robot: LitterRobot4,
|
||||
description: RobotSwitchEntityDescription,
|
||||
entity_cls: type[RobotSwitchEntity],
|
||||
) -> None:
|
||||
"""Add deprecated entities."""
|
||||
unique_id = f"{robot.serial}-{description.key}"
|
||||
if entity_id := ent_reg.async_get_entity_id(SWITCH_DOMAIN, DOMAIN, unique_id):
|
||||
entity_entry = ent_reg.async_get(entity_id)
|
||||
if entity_entry and entity_entry.disabled:
|
||||
ent_reg.async_remove(entity_id)
|
||||
async_delete_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_entity_{unique_id}",
|
||||
)
|
||||
elif entity_entry:
|
||||
entities.append(entity_cls(robot, coordinator, description))
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_entity_{unique_id}",
|
||||
breaks_in_ha_version="2026.4.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_entity",
|
||||
translation_placeholders={
|
||||
"name": f"{robot.name} {entity_entry.name or entity_entry.original_name}",
|
||||
"entity": entity_id,
|
||||
},
|
||||
)
|
||||
|
||||
for robot in coordinator.account.get_robots(LitterRobot4):
|
||||
add_deprecated_entity(
|
||||
robot, NIGHT_LIGHT_MODE_ENTITY_DESCRIPTION, RobotSwitchEntity
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
)
|
||||
|
||||
|
||||
class RobotSwitchEntity(LitterRobotEntity[_WhiskerEntityT], SwitchEntity):
|
||||
|
@@ -338,7 +338,7 @@ STATE_PROGRAM_PHASE: dict[int, dict[int, str]] = {
|
||||
}
|
||||
|
||||
|
||||
class StateProgramType(MieleEnum, missing_to_none=True):
|
||||
class StateProgramType(MieleEnum):
|
||||
"""Defines program types."""
|
||||
|
||||
normal_operation_mode = 0
|
||||
@@ -346,9 +346,10 @@ class StateProgramType(MieleEnum, missing_to_none=True):
|
||||
automatic_program = 2
|
||||
cleaning_care_program = 3
|
||||
maintenance_program = 4
|
||||
missing2none = -9999
|
||||
|
||||
|
||||
class StateDryingStep(MieleEnum, missing_to_none=True):
|
||||
class StateDryingStep(MieleEnum):
|
||||
"""Defines drying steps."""
|
||||
|
||||
extra_dry = 0
|
||||
@@ -359,6 +360,7 @@ class StateDryingStep(MieleEnum, missing_to_none=True):
|
||||
hand_iron_2 = 5
|
||||
machine_iron = 6
|
||||
smoothing = 7
|
||||
missing2none = -9999
|
||||
|
||||
|
||||
WASHING_MACHINE_PROGRAM_ID: dict[int, str] = {
|
||||
@@ -1312,7 +1314,7 @@ STATE_PROGRAM_ID: dict[int, dict[int, str]] = {
|
||||
}
|
||||
|
||||
|
||||
class PlatePowerStep(MieleEnum, missing_to_none=True):
|
||||
class PlatePowerStep(MieleEnum):
|
||||
"""Plate power settings."""
|
||||
|
||||
plate_step_0 = 0
|
||||
@@ -1337,3 +1339,4 @@ class PlatePowerStep(MieleEnum, missing_to_none=True):
|
||||
plate_step_18 = 18
|
||||
plate_step_boost = 117, 118, 218
|
||||
plate_step_boost_2 = 217
|
||||
missing2none = -9999
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymiele"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pymiele==0.5.5"],
|
||||
"requirements": ["pymiele==0.5.4"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_mieleathome._tcp.local."]
|
||||
}
|
||||
|
@@ -64,7 +64,7 @@ PROGRAM_TO_SPEED: dict[int, str] = {
|
||||
}
|
||||
|
||||
|
||||
class MieleVacuumStateCode(MieleEnum, missing_to_none=True):
|
||||
class MieleVacuumStateCode(MieleEnum):
|
||||
"""Define vacuum state codes."""
|
||||
|
||||
idle = 0
|
||||
@@ -82,6 +82,7 @@ class MieleVacuumStateCode(MieleEnum, missing_to_none=True):
|
||||
blocked_front_wheel = 5900
|
||||
docked = 5903, 5904
|
||||
remote_controlled = 5910
|
||||
missing2none = -9999
|
||||
|
||||
|
||||
SUPPORTED_FEATURES = (
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aionfty"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aiontfy==0.6.0"]
|
||||
"requirements": ["aiontfy==0.5.5"]
|
||||
}
|
||||
|
@@ -371,11 +371,7 @@ class NumberEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
@final
|
||||
@property
|
||||
def __native_unit_of_measurement_compat(self) -> str | None:
|
||||
"""Handle wrong character coding in unit provided by integrations.
|
||||
|
||||
NumberEntity should read the number's native unit through this property instead
|
||||
of through native_unit_of_measurement.
|
||||
"""
|
||||
"""Process ambiguous units."""
|
||||
native_unit_of_measurement = self.native_unit_of_measurement
|
||||
return AMBIGUOUS_UNITS.get(
|
||||
native_unit_of_measurement, native_unit_of_measurement
|
||||
|
@@ -124,7 +124,7 @@ class NumberDeviceClass(StrEnum):
|
||||
CO = "carbon_monoxide"
|
||||
"""Carbon Monoxide gas concentration.
|
||||
|
||||
Unit of measurement: `ppm` (parts per million), mg/m³
|
||||
Unit of measurement: `ppm` (parts per million)
|
||||
"""
|
||||
|
||||
CO2 = "carbon_dioxide"
|
||||
@@ -469,10 +469,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
|
||||
NumberDeviceClass.BATTERY: {PERCENTAGE},
|
||||
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
|
||||
NumberDeviceClass.CO: {
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
},
|
||||
NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
|
||||
NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
|
||||
NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
|
||||
NumberDeviceClass.CURRENT: set(UnitOfElectricCurrent),
|
||||
|
@@ -148,7 +148,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
content.extend(
|
||||
await async_prepare_files_for_prompt(
|
||||
hass, [(Path(filename), None) for filename in filenames]
|
||||
hass, [Path(filename) for filename in filenames]
|
||||
)
|
||||
)
|
||||
|
||||
|
@@ -223,17 +223,15 @@ def _convert_content_to_param(
|
||||
ResponseReasoningItemParam(
|
||||
type="reasoning",
|
||||
id=content.native.id,
|
||||
summary=(
|
||||
[
|
||||
{
|
||||
"type": "summary_text",
|
||||
"text": summary,
|
||||
}
|
||||
for summary in reasoning_summary
|
||||
]
|
||||
if content.thinking_content
|
||||
else []
|
||||
),
|
||||
summary=[
|
||||
{
|
||||
"type": "summary_text",
|
||||
"text": summary,
|
||||
}
|
||||
for summary in reasoning_summary
|
||||
]
|
||||
if content.thinking_content
|
||||
else [],
|
||||
encrypted_content=content.native.encrypted_content,
|
||||
)
|
||||
)
|
||||
@@ -310,11 +308,9 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
"tool_call_id": event.item.id,
|
||||
"tool_name": "code_interpreter",
|
||||
"tool_result": {
|
||||
"output": (
|
||||
[output.to_dict() for output in event.item.outputs] # type: ignore[misc]
|
||||
if event.item.outputs is not None
|
||||
else None
|
||||
)
|
||||
"output": [output.to_dict() for output in event.item.outputs] # type: ignore[misc]
|
||||
if event.item.outputs is not None
|
||||
else None
|
||||
},
|
||||
}
|
||||
last_role = "tool_result"
|
||||
@@ -533,7 +529,7 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
if last_content.role == "user" and last_content.attachments:
|
||||
files = await async_prepare_files_for_prompt(
|
||||
self.hass,
|
||||
[(a.path, a.mime_type) for a in last_content.attachments],
|
||||
[a.path for a in last_content.attachments],
|
||||
)
|
||||
last_message = messages[-1]
|
||||
assert (
|
||||
@@ -605,7 +601,7 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
|
||||
|
||||
async def async_prepare_files_for_prompt(
|
||||
hass: HomeAssistant, files: list[tuple[Path, str | None]]
|
||||
hass: HomeAssistant, files: list[Path]
|
||||
) -> ResponseInputMessageContentListParam:
|
||||
"""Append files to a prompt.
|
||||
|
||||
@@ -615,12 +611,11 @@ async def async_prepare_files_for_prompt(
|
||||
def append_files_to_content() -> ResponseInputMessageContentListParam:
|
||||
content: ResponseInputMessageContentListParam = []
|
||||
|
||||
for file_path, mime_type in files:
|
||||
for file_path in files:
|
||||
if not file_path.exists():
|
||||
raise HomeAssistantError(f"`{file_path}` does not exist")
|
||||
|
||||
if mime_type is None:
|
||||
mime_type = guess_file_type(file_path)[0]
|
||||
mime_type, _ = guess_file_type(file_path)
|
||||
|
||||
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
|
||||
raise HomeAssistantError(
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["opower"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["opower==0.15.5"]
|
||||
"requirements": ["opower==0.15.4"]
|
||||
}
|
||||
|
@@ -46,7 +46,6 @@ from homeassistant.util.unit_conversion import (
|
||||
AreaConverter,
|
||||
BaseUnitConverter,
|
||||
BloodGlucoseConcentrationConverter,
|
||||
CarbonMonoxideConcentrationConverter,
|
||||
ConductivityConverter,
|
||||
DataRateConverter,
|
||||
DistanceConverter,
|
||||
@@ -205,10 +204,6 @@ STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
|
||||
**dict.fromkeys(
|
||||
MassVolumeConcentrationConverter.VALID_UNITS, MassVolumeConcentrationConverter
|
||||
),
|
||||
**dict.fromkeys(
|
||||
CarbonMonoxideConcentrationConverter.VALID_UNITS,
|
||||
CarbonMonoxideConcentrationConverter,
|
||||
),
|
||||
**dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter),
|
||||
**dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter),
|
||||
**dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter),
|
||||
|
@@ -19,7 +19,6 @@ from homeassistant.util.unit_conversion import (
|
||||
ApparentPowerConverter,
|
||||
AreaConverter,
|
||||
BloodGlucoseConcentrationConverter,
|
||||
CarbonMonoxideConcentrationConverter,
|
||||
ConductivityConverter,
|
||||
DataRateConverter,
|
||||
DistanceConverter,
|
||||
@@ -67,9 +66,6 @@ UNIT_SCHEMA = vol.Schema(
|
||||
vol.Optional("blood_glucose_concentration"): vol.In(
|
||||
BloodGlucoseConcentrationConverter.VALID_UNITS
|
||||
),
|
||||
vol.Optional("carbon_monoxide"): vol.In(
|
||||
CarbonMonoxideConcentrationConverter.VALID_UNITS
|
||||
),
|
||||
vol.Optional("concentration"): vol.In(
|
||||
MassVolumeConcentrationConverter.VALID_UNITS
|
||||
),
|
||||
|
@@ -7,11 +7,9 @@ from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from reolink_aio.api import Host
|
||||
from reolink_aio.const import MAX_COLOR_TEMP, MIN_COLOR_TEMP
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS,
|
||||
ATTR_COLOR_TEMP_KELVIN,
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
LightEntityDescription,
|
||||
@@ -39,10 +37,8 @@ class ReolinkLightEntityDescription(
|
||||
"""A class that describes light entities."""
|
||||
|
||||
get_brightness_fn: Callable[[Host, int], int | None] | None = None
|
||||
get_color_temp_fn: Callable[[Host, int], int | None] | None = None
|
||||
is_on_fn: Callable[[Host, int], bool]
|
||||
set_brightness_fn: Callable[[Host, int, int], Any] | None = None
|
||||
set_color_temp_fn: Callable[[Host, int, int], Any] | None = None
|
||||
turn_on_off_fn: Callable[[Host, int, bool], Any]
|
||||
|
||||
|
||||
@@ -68,10 +64,6 @@ LIGHT_ENTITIES = (
|
||||
turn_on_off_fn=lambda api, ch, value: api.set_whiteled(ch, state=value),
|
||||
get_brightness_fn=lambda api, ch: api.whiteled_brightness(ch),
|
||||
set_brightness_fn=lambda api, ch, value: api.set_whiteled(ch, brightness=value),
|
||||
get_color_temp_fn=lambda api, ch: api.whiteled_color_temperature(ch),
|
||||
set_color_temp_fn=lambda api, ch, value: (
|
||||
api.baichuan.set_floodlight(ch, color_temp=value)
|
||||
),
|
||||
),
|
||||
ReolinkLightEntityDescription(
|
||||
key="status_led",
|
||||
@@ -135,20 +127,12 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
|
||||
self.entity_description = entity_description
|
||||
super().__init__(reolink_data, channel)
|
||||
|
||||
if (
|
||||
entity_description.set_color_temp_fn is not None
|
||||
and self._host.api.supported(self._channel, "color_temp")
|
||||
):
|
||||
self._attr_supported_color_modes = {ColorMode.COLOR_TEMP}
|
||||
self._attr_color_mode = ColorMode.COLOR_TEMP
|
||||
self._attr_min_color_temp_kelvin = MIN_COLOR_TEMP
|
||||
self._attr_max_color_temp_kelvin = MAX_COLOR_TEMP
|
||||
elif entity_description.set_brightness_fn is not None:
|
||||
self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
|
||||
self._attr_color_mode = ColorMode.BRIGHTNESS
|
||||
else:
|
||||
if entity_description.set_brightness_fn is None:
|
||||
self._attr_supported_color_modes = {ColorMode.ONOFF}
|
||||
self._attr_color_mode = ColorMode.ONOFF
|
||||
else:
|
||||
self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
|
||||
self._attr_color_mode = ColorMode.BRIGHTNESS
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
@@ -168,13 +152,6 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
|
||||
|
||||
return round(255 * bright_pct / 100.0)
|
||||
|
||||
@property
|
||||
def color_temp_kelvin(self) -> int | None:
|
||||
"""Return the color temperature of this light in kelvin."""
|
||||
assert self.entity_description.get_color_temp_fn is not None
|
||||
|
||||
return self.entity_description.get_color_temp_fn(self._host.api, self._channel)
|
||||
|
||||
@raise_translated_error
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn light off."""
|
||||
@@ -194,13 +171,6 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
|
||||
self._host.api, self._channel, brightness_pct
|
||||
)
|
||||
|
||||
if (
|
||||
color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN)
|
||||
) is not None and self.entity_description.set_color_temp_fn is not None:
|
||||
await self.entity_description.set_color_temp_fn(
|
||||
self._host.api, self._channel, color_temp
|
||||
)
|
||||
|
||||
await self.entity_description.turn_on_off_fn(
|
||||
self._host.api, self._channel, True
|
||||
)
|
||||
|
@@ -19,5 +19,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.15.1"]
|
||||
"requirements": ["reolink-aio==0.15.0"]
|
||||
}
|
||||
|
@@ -502,7 +502,7 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="image_brightness",
|
||||
cmd_key="GetImage",
|
||||
cmd_id=[26, 78],
|
||||
cmd_id=26,
|
||||
translation_key="image_brightness",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
@@ -516,7 +516,7 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="image_contrast",
|
||||
cmd_key="GetImage",
|
||||
cmd_id=[26, 78],
|
||||
cmd_id=26,
|
||||
translation_key="image_contrast",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
@@ -530,7 +530,7 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="image_saturation",
|
||||
cmd_key="GetImage",
|
||||
cmd_id=[26, 78],
|
||||
cmd_id=26,
|
||||
translation_key="image_saturation",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
@@ -544,7 +544,7 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="image_sharpness",
|
||||
cmd_key="GetImage",
|
||||
cmd_id=[26, 78],
|
||||
cmd_id=26,
|
||||
translation_key="image_sharpness",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
@@ -558,7 +558,7 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="image_hue",
|
||||
cmd_key="GetImage",
|
||||
cmd_id=[26, 78],
|
||||
cmd_id=26,
|
||||
translation_key="image_hue",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
|
@@ -140,7 +140,6 @@ SENSORS = (
|
||||
HOST_SENSORS = (
|
||||
ReolinkHostSensorEntityDescription(
|
||||
key="wifi_signal",
|
||||
cmd_id=464,
|
||||
cmd_key="115",
|
||||
translation_key="wifi_signal",
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
|
@@ -19,7 +19,7 @@
|
||||
"loggers": ["roborock"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": [
|
||||
"python-roborock==2.44.1",
|
||||
"python-roborock==2.18.2",
|
||||
"vacuum-map-parser-roborock==0.1.4"
|
||||
]
|
||||
}
|
||||
|
@@ -366,7 +366,7 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
because a unit converter supports both.
|
||||
"""
|
||||
# No need to check the unit converter if the units are the same
|
||||
if self.__native_unit_of_measurement_compat == suggested_unit_of_measurement:
|
||||
if self.native_unit_of_measurement == suggested_unit_of_measurement:
|
||||
return True
|
||||
|
||||
# Make sure there is a unit converter and it supports both units
|
||||
@@ -478,11 +478,7 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
@final
|
||||
@property
|
||||
def __native_unit_of_measurement_compat(self) -> str | None:
|
||||
"""Handle wrong character coding in unit provided by integrations.
|
||||
|
||||
SensorEntity should read the sensor's native unit through this property instead
|
||||
of through native_unit_of_measurement.
|
||||
"""
|
||||
"""Process ambiguous units."""
|
||||
native_unit_of_measurement = self.native_unit_of_measurement
|
||||
return AMBIGUOUS_UNITS.get(
|
||||
native_unit_of_measurement,
|
||||
|
@@ -51,7 +51,6 @@ from homeassistant.util.unit_conversion import (
|
||||
AreaConverter,
|
||||
BaseUnitConverter,
|
||||
BloodGlucoseConcentrationConverter,
|
||||
CarbonMonoxideConcentrationConverter,
|
||||
ConductivityConverter,
|
||||
DataRateConverter,
|
||||
DistanceConverter,
|
||||
@@ -157,7 +156,7 @@ class SensorDeviceClass(StrEnum):
|
||||
CO = "carbon_monoxide"
|
||||
"""Carbon Monoxide gas concentration.
|
||||
|
||||
Unit of measurement: `ppm` (parts per million), `mg/m³`
|
||||
Unit of measurement: `ppm` (parts per million)
|
||||
"""
|
||||
|
||||
CO2 = "carbon_dioxide"
|
||||
@@ -538,7 +537,6 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
|
||||
SensorDeviceClass.AREA: AreaConverter,
|
||||
SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter,
|
||||
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlucoseConcentrationConverter,
|
||||
SensorDeviceClass.CO: CarbonMonoxideConcentrationConverter,
|
||||
SensorDeviceClass.CONDUCTIVITY: ConductivityConverter,
|
||||
SensorDeviceClass.CURRENT: ElectricCurrentConverter,
|
||||
SensorDeviceClass.DATA_RATE: DataRateConverter,
|
||||
@@ -580,10 +578,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
|
||||
SensorDeviceClass.BATTERY: {PERCENTAGE},
|
||||
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
|
||||
SensorDeviceClass.CO: {
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
},
|
||||
SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
|
||||
SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
|
||||
SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
|
||||
SensorDeviceClass.CURRENT: set(UnitOfElectricCurrent),
|
||||
|
@@ -59,7 +59,6 @@ from .coordinator import (
|
||||
from .repairs import (
|
||||
async_manage_ble_scanner_firmware_unsupported_issue,
|
||||
async_manage_outbound_websocket_incorrectly_enabled_issue,
|
||||
async_manage_wall_display_firmware_unsupported_issue,
|
||||
)
|
||||
from .utils import (
|
||||
async_create_issue_unsupported_firmware,
|
||||
@@ -329,7 +328,6 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
entry, runtime_data.platforms
|
||||
)
|
||||
async_manage_wall_display_firmware_unsupported_issue(hass, entry)
|
||||
async_manage_ble_scanner_firmware_unsupported_issue(
|
||||
hass,
|
||||
entry,
|
||||
|
@@ -232,7 +232,6 @@ class BLEScannerMode(StrEnum):
|
||||
|
||||
|
||||
BLE_SCANNER_MIN_FIRMWARE = "1.5.1"
|
||||
WALL_DISPLAY_MIN_FIRMWARE = "2.3.0"
|
||||
|
||||
MAX_PUSH_UPDATE_FAILURES = 5
|
||||
PUSH_UPDATE_ISSUE_ID = "push_update_{unique}"
|
||||
@@ -245,9 +244,6 @@ BLE_SCANNER_FIRMWARE_UNSUPPORTED_ISSUE_ID = "ble_scanner_firmware_unsupported_{u
|
||||
OUTBOUND_WEBSOCKET_INCORRECTLY_ENABLED_ISSUE_ID = (
|
||||
"outbound_websocket_incorrectly_enabled_{unique}"
|
||||
)
|
||||
WALL_DISPLAY_FIRMWARE_UNSUPPORTED_ISSUE_ID = (
|
||||
"wall_display_firmware_unsupported_{unique}"
|
||||
)
|
||||
|
||||
GAS_VALVE_OPEN_STATES = ("opening", "opened")
|
||||
|
||||
|
@@ -9,7 +9,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioshelly"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioshelly==13.10.0"],
|
||||
"requirements": ["aioshelly==13.9.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_http._tcp.local.",
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aioshelly.const import MODEL_OUT_PLUG_S_G3, MODEL_PLUG_S_G3, MODEL_WALL_DISPLAY
|
||||
from aioshelly.const import MODEL_OUT_PLUG_S_G3, MODEL_PLUG_S_G3
|
||||
from aioshelly.exceptions import DeviceConnectionError, RpcCallError
|
||||
from aioshelly.rpc_device import RpcDevice
|
||||
from awesomeversion import AwesomeVersion
|
||||
@@ -21,8 +21,6 @@ from .const import (
|
||||
CONF_BLE_SCANNER_MODE,
|
||||
DOMAIN,
|
||||
OUTBOUND_WEBSOCKET_INCORRECTLY_ENABLED_ISSUE_ID,
|
||||
WALL_DISPLAY_FIRMWARE_UNSUPPORTED_ISSUE_ID,
|
||||
WALL_DISPLAY_MIN_FIRMWARE,
|
||||
BLEScannerMode,
|
||||
)
|
||||
from .coordinator import ShellyConfigEntry
|
||||
@@ -69,42 +67,6 @@ def async_manage_ble_scanner_firmware_unsupported_issue(
|
||||
ir.async_delete_issue(hass, DOMAIN, issue_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_manage_wall_display_firmware_unsupported_issue(
|
||||
hass: HomeAssistant,
|
||||
entry: ShellyConfigEntry,
|
||||
) -> None:
|
||||
"""Manage the Wall Display firmware unsupported issue."""
|
||||
issue_id = WALL_DISPLAY_FIRMWARE_UNSUPPORTED_ISSUE_ID.format(unique=entry.unique_id)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert entry.runtime_data.rpc is not None
|
||||
|
||||
device = entry.runtime_data.rpc.device
|
||||
|
||||
if entry.data["model"] == MODEL_WALL_DISPLAY:
|
||||
firmware = AwesomeVersion(device.shelly["ver"])
|
||||
if firmware < WALL_DISPLAY_MIN_FIRMWARE:
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="wall_display_firmware_unsupported",
|
||||
translation_placeholders={
|
||||
"device_name": device.name,
|
||||
"ip_address": device.ip_address,
|
||||
"firmware": firmware,
|
||||
},
|
||||
data={"entry_id": entry.entry_id},
|
||||
)
|
||||
return
|
||||
|
||||
ir.async_delete_issue(hass, DOMAIN, issue_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_manage_outbound_websocket_incorrectly_enabled_issue(
|
||||
hass: HomeAssistant,
|
||||
@@ -180,8 +142,8 @@ class ShellyRpcRepairsFlow(RepairsFlow):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class FirmwareUpdateFlow(ShellyRpcRepairsFlow):
|
||||
"""Handler for Firmware Update flow."""
|
||||
class BleScannerFirmwareUpdateFlow(ShellyRpcRepairsFlow):
|
||||
"""Handler for BLE Scanner Firmware Update flow."""
|
||||
|
||||
async def _async_step_confirm(self) -> data_entry_flow.FlowResult:
|
||||
"""Handle the confirm step of a fix flow."""
|
||||
@@ -239,11 +201,8 @@ async def async_create_fix_flow(
|
||||
|
||||
device = entry.runtime_data.rpc.device
|
||||
|
||||
if (
|
||||
"ble_scanner_firmware_unsupported" in issue_id
|
||||
or "wall_display_firmware_unsupported" in issue_id
|
||||
):
|
||||
return FirmwareUpdateFlow(device)
|
||||
if "ble_scanner_firmware_unsupported" in issue_id:
|
||||
return BleScannerFirmwareUpdateFlow(device)
|
||||
|
||||
if "outbound_websocket_incorrectly_enabled" in issue_id:
|
||||
return DisableOutboundWebSocketFlow(device)
|
||||
|
@@ -288,21 +288,6 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"wall_display_firmware_unsupported": {
|
||||
"title": "{device_name} is running outdated firmware",
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"title": "{device_name} is running outdated firmware",
|
||||
"description": "Your Shelly device {device_name} with IP address {ip_address} is running firmware {firmware}. This firmware version will not be supported by Shelly integration starting from Home Assistant 2025.11.0.\n\nSelect **Submit** button to start the OTA update to the latest stable firmware version."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"update_not_available": "[%key:component::shelly::issues::ble_scanner_firmware_unsupported::fix_flow::abort::update_not_available%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["solarlog_cli"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["solarlog_cli==0.6.0"]
|
||||
"requirements": ["solarlog_cli==0.5.0"]
|
||||
}
|
||||
|
@@ -8,7 +8,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/sonos",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["soco", "sonos_websocket"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["soco==0.30.11", "sonos-websocket==0.1.3"],
|
||||
"ssdp": [
|
||||
{
|
||||
|
@@ -305,7 +305,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
|
||||
@soco_error()
|
||||
def set_volume_level(self, volume: float) -> None:
|
||||
"""Set volume level, range 0..1."""
|
||||
self.soco.volume = int(round(volume * 100))
|
||||
self.soco.volume = int(volume * 100)
|
||||
|
||||
@soco_error(UPNP_ERRORS_TO_IGNORE)
|
||||
def set_shuffle(self, shuffle: bool) -> None:
|
||||
|
@@ -7,7 +7,7 @@ from typing import cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
|
||||
from homeassistant.const import SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.condition import (
|
||||
@@ -25,7 +25,6 @@ _CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
**cv.CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "sun",
|
||||
vol.Optional("before"): cv.sun_event,
|
||||
vol.Optional("before_offset"): cv.time_period,
|
||||
vol.Optional("after"): vol.All(
|
||||
|
@@ -1,17 +1,12 @@
|
||||
{
|
||||
"domain": "switchbot_cloud",
|
||||
"name": "SwitchBot Cloud",
|
||||
"codeowners": [
|
||||
"@SeraphicRav",
|
||||
"@laurence-presland",
|
||||
"@Gigatrappeur",
|
||||
"@XiaoLing-git"
|
||||
],
|
||||
"codeowners": ["@SeraphicRav", "@laurence-presland", "@Gigatrappeur"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/switchbot_cloud",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["switchbot_api"],
|
||||
"requirements": ["switchbot-api==2.8.0"]
|
||||
"requirements": ["switchbot-api==2.7.0"]
|
||||
}
|
||||
|
@@ -365,7 +365,7 @@ class UniversalMediaPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_image_url(self):
|
||||
"""Image url of current playing media."""
|
||||
return self._override_or_child_attr(ATTR_ENTITY_PICTURE)
|
||||
return self._child_attr(ATTR_ENTITY_PICTURE)
|
||||
|
||||
@property
|
||||
def entity_picture(self):
|
||||
|
@@ -18,7 +18,6 @@ from homeassistant.components.recorder.models import uuid_hex_to_bytes_or_none
|
||||
from homeassistant.components.recorder.util import session_scope
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.json import json_loads_object
|
||||
|
||||
@@ -42,6 +41,8 @@ ALLOWED_DOMAINS = {
|
||||
Platform.CAMERA,
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DATE,
|
||||
Platform.DATETIME,
|
||||
Platform.FAN,
|
||||
Platform.HUMIDIFIER,
|
||||
Platform.IMAGE,
|
||||
@@ -56,9 +57,14 @@ ALLOWED_DOMAINS = {
|
||||
Platform.SIREN,
|
||||
Platform.SWITCH,
|
||||
Platform.TEXT,
|
||||
Platform.TIME,
|
||||
Platform.TODO,
|
||||
Platform.UPDATE,
|
||||
Platform.VACUUM,
|
||||
Platform.VALVE,
|
||||
Platform.WAKE_WORD,
|
||||
Platform.WATER_HEATER,
|
||||
Platform.WEATHER,
|
||||
# Helpers with own domain
|
||||
"counter",
|
||||
"group",
|
||||
@@ -99,17 +105,14 @@ async def async_predict_common_control(
|
||||
"""
|
||||
# Get the recorder instance to ensure it's ready
|
||||
recorder = get_instance(hass)
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
# Execute the database operation in the recorder's executor
|
||||
return await recorder.async_add_executor_job(
|
||||
_fetch_with_session, hass, _fetch_and_process_data, ent_reg, user_id
|
||||
_fetch_with_session, hass, _fetch_and_process_data, user_id
|
||||
)
|
||||
|
||||
|
||||
def _fetch_and_process_data(
|
||||
session: Session, ent_reg: er.EntityRegistry, user_id: str
|
||||
) -> EntityUsagePredictions:
|
||||
def _fetch_and_process_data(session: Session, user_id: str) -> EntityUsagePredictions:
|
||||
"""Fetch and process service call events from the database."""
|
||||
# Prepare a dictionary to track results
|
||||
results: dict[str, Counter[str]] = {
|
||||
@@ -195,7 +198,6 @@ def _fetch_and_process_data(
|
||||
entity_id
|
||||
for entity_id in entity_ids
|
||||
if entity_id.split(".")[0] in ALLOWED_DOMAINS
|
||||
and ((entry := ent_reg.async_get(entity_id)) is None or not entry.hidden)
|
||||
]
|
||||
|
||||
if not entity_ids:
|
||||
|
@@ -1,34 +0,0 @@
|
||||
"""The Victron VRM Solar Forecast integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import (
|
||||
VictronRemoteMonitoringConfigEntry,
|
||||
VictronRemoteMonitoringDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
_PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: VictronRemoteMonitoringConfigEntry
|
||||
) -> bool:
|
||||
"""Set up VRM from a config entry."""
|
||||
coordinator = VictronRemoteMonitoringDataUpdateCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: VictronRemoteMonitoringConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
@@ -1,255 +0,0 @@
|
||||
"""Config flow for the Victron VRM Solar Forecast integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from victron_vrm import VictronVRMClient
|
||||
from victron_vrm.exceptions import AuthenticationError, VictronVRMError
|
||||
from victron_vrm.models import Site
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_API_TOKEN, CONF_SITE_ID, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_TOKEN): str})
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
|
||||
|
||||
class SiteNotFound(HomeAssistantError):
|
||||
"""Error to indicate the site was not found."""
|
||||
|
||||
|
||||
class VictronRemoteMonitoringFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Victron Remote Monitoring.
|
||||
|
||||
Supports reauthentication when the stored token becomes invalid.
|
||||
"""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize flow state."""
|
||||
self._api_token: str | None = None
|
||||
self._sites: list[Site] = []
|
||||
|
||||
def _build_site_options(self) -> list[SelectOptionDict]:
|
||||
"""Build selector options for the available sites."""
|
||||
return [
|
||||
SelectOptionDict(
|
||||
value=str(site.id), label=f"{(site.name or 'Site')} (ID:{site.id})"
|
||||
)
|
||||
for site in self._sites
|
||||
]
|
||||
|
||||
async def _async_validate_token_and_fetch_sites(self, api_token: str) -> list[Site]:
|
||||
"""Validate the API token and return available sites.
|
||||
|
||||
Raises InvalidAuth on bad/unauthorized token; CannotConnect on other errors.
|
||||
"""
|
||||
client = VictronVRMClient(
|
||||
token=api_token,
|
||||
client_session=get_async_client(self.hass),
|
||||
)
|
||||
try:
|
||||
sites = await client.users.list_sites()
|
||||
except AuthenticationError as err:
|
||||
raise InvalidAuth("Invalid authentication or permission") from err
|
||||
except VictronVRMError as err:
|
||||
if getattr(err, "status_code", None) in (401, 403):
|
||||
raise InvalidAuth("Invalid authentication or permission") from err
|
||||
raise CannotConnect(f"Cannot connect to VRM API: {err}") from err
|
||||
else:
|
||||
return sites
|
||||
|
||||
async def _async_validate_selected_site(self, api_token: str, site_id: int) -> Site:
|
||||
"""Validate access to the selected site and return its data."""
|
||||
client = VictronVRMClient(
|
||||
token=api_token,
|
||||
client_session=get_async_client(self.hass),
|
||||
)
|
||||
try:
|
||||
site_data = await client.users.get_site(site_id)
|
||||
except AuthenticationError as err:
|
||||
raise InvalidAuth("Invalid authentication or permission") from err
|
||||
except VictronVRMError as err:
|
||||
if getattr(err, "status_code", None) in (401, 403):
|
||||
raise InvalidAuth("Invalid authentication or permission") from err
|
||||
raise CannotConnect(f"Cannot connect to VRM API: {err}") from err
|
||||
if site_data is None:
|
||||
raise SiteNotFound(f"Site with ID {site_id} not found")
|
||||
return site_data
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""First step: ask for API token and validate it."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
api_token: str = user_input[CONF_API_TOKEN]
|
||||
try:
|
||||
sites = await self._async_validate_token_and_fetch_sites(api_token)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not sites:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors={"base": "no_sites"},
|
||||
)
|
||||
self._api_token = api_token
|
||||
# Sort sites by name then id for stable order
|
||||
self._sites = sorted(sites, key=lambda s: (s.name or "", s.id))
|
||||
if len(self._sites) == 1:
|
||||
# Only one site available, skip site selection step
|
||||
site = self._sites[0]
|
||||
await self.async_set_unique_id(
|
||||
str(site.id), raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"VRM for {site.name}",
|
||||
data={CONF_API_TOKEN: self._api_token, CONF_SITE_ID: site.id},
|
||||
)
|
||||
return await self.async_step_select_site()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_select_site(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Second step: present sites and validate selection."""
|
||||
assert self._api_token is not None
|
||||
|
||||
if user_input is None:
|
||||
site_options = self._build_site_options()
|
||||
return self.async_show_form(
|
||||
step_id="select_site",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SITE_ID): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=site_options, mode=SelectSelectorMode.DROPDOWN
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# User submitted a site selection
|
||||
site_id = int(user_input[CONF_SITE_ID])
|
||||
# Prevent duplicate entries for the same site
|
||||
self._async_abort_entries_match({CONF_SITE_ID: site_id})
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
site = await self._async_validate_selected_site(self._api_token, site_id)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except SiteNotFound:
|
||||
errors["base"] = "site_not_found"
|
||||
except Exception: # pragma: no cover - unexpected
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Ensure unique ID per site to avoid duplicates across reloads
|
||||
await self.async_set_unique_id(str(site_id), raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"VRM for {site.name}",
|
||||
data={CONF_API_TOKEN: self._api_token, CONF_SITE_ID: site_id},
|
||||
)
|
||||
|
||||
# If we reach here, show the selection form again with errors
|
||||
site_options = self._build_site_options()
|
||||
return self.async_show_form(
|
||||
step_id="select_site",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SITE_ID): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=site_options, mode=SelectSelectorMode.DROPDOWN
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Start reauthentication by asking for a (new) API token.
|
||||
|
||||
We only need the token again; the site is fixed per entry and set as unique id.
|
||||
"""
|
||||
self._api_token = None
|
||||
self._sites = []
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: Mapping[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication confirmation with new token."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
|
||||
if user_input is not None:
|
||||
new_token = user_input[CONF_API_TOKEN]
|
||||
site_id: int = reauth_entry.data[CONF_SITE_ID]
|
||||
try:
|
||||
# Validate the token by fetching the site for the existing entry
|
||||
await self._async_validate_selected_site(new_token, site_id)
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except SiteNotFound:
|
||||
# Site removed or no longer visible to the account; treat as cannot connect
|
||||
errors["base"] = "site_not_found"
|
||||
except Exception: # pragma: no cover - unexpected
|
||||
_LOGGER.exception("Unexpected exception during reauth")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Update stored token and reload entry
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_API_TOKEN: new_token},
|
||||
reason="reauth_successful",
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
|
||||
errors=errors,
|
||||
)
|
@@ -1,9 +0,0 @@
|
||||
"""Constants for the Victron VRM Solar Forecast integration."""
|
||||
|
||||
import logging
|
||||
|
||||
DOMAIN = "victron_remote_monitoring"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
CONF_SITE_ID = "site_id"
|
||||
CONF_API_TOKEN = "api_token"
|
@@ -1,98 +0,0 @@
|
||||
"""VRM Coordinator and Client."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
import datetime
|
||||
|
||||
from victron_vrm import VictronVRMClient
|
||||
from victron_vrm.exceptions import AuthenticationError, VictronVRMError
|
||||
from victron_vrm.models.aggregations import ForecastAggregations
|
||||
from victron_vrm.utils import dt_now
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import CONF_API_TOKEN, CONF_SITE_ID, DOMAIN, LOGGER
|
||||
|
||||
type VictronRemoteMonitoringConfigEntry = ConfigEntry[
|
||||
VictronRemoteMonitoringDataUpdateCoordinator
|
||||
]
|
||||
|
||||
|
||||
@dataclass
|
||||
class VRMForecastStore:
|
||||
"""Class to hold the forecast data."""
|
||||
|
||||
site_id: int
|
||||
solar: ForecastAggregations
|
||||
consumption: ForecastAggregations
|
||||
|
||||
|
||||
async def get_forecast(client: VictronVRMClient, site_id: int) -> VRMForecastStore:
|
||||
"""Get the forecast data."""
|
||||
start = int(
|
||||
(
|
||||
dt_now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
- datetime.timedelta(days=1)
|
||||
).timestamp()
|
||||
)
|
||||
# Get timestamp of the end of 6th day from now
|
||||
end = int(
|
||||
(
|
||||
dt_now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
+ datetime.timedelta(days=6)
|
||||
).timestamp()
|
||||
)
|
||||
stats = await client.installations.stats(
|
||||
site_id,
|
||||
start=start,
|
||||
end=end,
|
||||
interval="hours",
|
||||
type="forecast",
|
||||
return_aggregations=True,
|
||||
)
|
||||
return VRMForecastStore(
|
||||
solar=stats["solar_yield"],
|
||||
consumption=stats["consumption"],
|
||||
site_id=site_id,
|
||||
)
|
||||
|
||||
|
||||
class VictronRemoteMonitoringDataUpdateCoordinator(
|
||||
DataUpdateCoordinator[VRMForecastStore]
|
||||
):
|
||||
"""Class to manage fetching VRM Forecast data."""
|
||||
|
||||
config_entry: VictronRemoteMonitoringConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: VictronRemoteMonitoringConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
self.client = VictronVRMClient(
|
||||
token=config_entry.data[CONF_API_TOKEN],
|
||||
client_session=get_async_client(hass),
|
||||
)
|
||||
self.site_id = config_entry.data[CONF_SITE_ID]
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=datetime.timedelta(minutes=60),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> VRMForecastStore:
|
||||
"""Fetch data from VRM API."""
|
||||
try:
|
||||
return await get_forecast(self.client, self.site_id)
|
||||
except AuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Invalid authentication for VRM API: {err}"
|
||||
) from err
|
||||
except VictronVRMError as err:
|
||||
raise UpdateFailed(f"Cannot connect to VRM API: {err}") from err
|
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"domain": "victron_remote_monitoring",
|
||||
"name": "Victron Remote Monitoring",
|
||||
"codeowners": ["@AndyTempel"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/victron_remote_monitoring",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["victron-vrm==0.1.7"]
|
||||
}
|
@@ -1,66 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: "This integration does not use actions."
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: "This integration does not use actions."
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: "This integration does not use actions."
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: todo
|
||||
integration-owner: todo
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
@@ -1,250 +0,0 @@
|
||||
"""Support for the VRM Solar Forecast sensor service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfEnergy
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
VictronRemoteMonitoringConfigEntry,
|
||||
VictronRemoteMonitoringDataUpdateCoordinator,
|
||||
VRMForecastStore,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class VRMForecastsSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes a VRM Forecast Sensor."""
|
||||
|
||||
value_fn: Callable[[VRMForecastStore], int | float | datetime | None]
|
||||
|
||||
|
||||
SENSORS: tuple[VRMForecastsSensorEntityDescription, ...] = (
|
||||
# Solar forecast sensors
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_estimate_yesterday",
|
||||
translation_key="energy_production_estimate_yesterday",
|
||||
value_fn=lambda estimate: estimate.solar.yesterday_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_estimate_today",
|
||||
translation_key="energy_production_estimate_today",
|
||||
value_fn=lambda estimate: estimate.solar.today_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_estimate_today_remaining",
|
||||
translation_key="energy_production_estimate_today_remaining",
|
||||
value_fn=lambda estimate: estimate.solar.today_left_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_estimate_tomorrow",
|
||||
translation_key="energy_production_estimate_tomorrow",
|
||||
value_fn=lambda estimate: estimate.solar.tomorrow_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="power_highest_peak_time_yesterday",
|
||||
translation_key="power_highest_peak_time_yesterday",
|
||||
value_fn=lambda estimate: estimate.solar.yesterday_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="power_highest_peak_time_today",
|
||||
translation_key="power_highest_peak_time_today",
|
||||
value_fn=lambda estimate: estimate.solar.today_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="power_highest_peak_time_tomorrow",
|
||||
translation_key="power_highest_peak_time_tomorrow",
|
||||
value_fn=lambda estimate: estimate.solar.tomorrow_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_current_hour",
|
||||
translation_key="energy_production_current_hour",
|
||||
value_fn=lambda estimate: estimate.solar.current_hour_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_production_next_hour",
|
||||
translation_key="energy_production_next_hour",
|
||||
value_fn=lambda estimate: estimate.solar.next_hour_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
# Consumption forecast sensors
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_estimate_yesterday",
|
||||
translation_key="energy_consumption_estimate_yesterday",
|
||||
value_fn=lambda estimate: estimate.consumption.yesterday_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_estimate_today",
|
||||
translation_key="energy_consumption_estimate_today",
|
||||
value_fn=lambda estimate: estimate.consumption.today_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_estimate_today_remaining",
|
||||
translation_key="energy_consumption_estimate_today_remaining",
|
||||
value_fn=lambda estimate: estimate.consumption.today_left_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_estimate_tomorrow",
|
||||
translation_key="energy_consumption_estimate_tomorrow",
|
||||
value_fn=lambda estimate: estimate.consumption.tomorrow_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="consumption_highest_peak_time_yesterday",
|
||||
translation_key="consumption_highest_peak_time_yesterday",
|
||||
value_fn=lambda estimate: estimate.consumption.yesterday_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="consumption_highest_peak_time_today",
|
||||
translation_key="consumption_highest_peak_time_today",
|
||||
value_fn=lambda estimate: estimate.consumption.today_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="consumption_highest_peak_time_tomorrow",
|
||||
translation_key="consumption_highest_peak_time_tomorrow",
|
||||
value_fn=lambda estimate: estimate.consumption.tomorrow_peak_time,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_current_hour",
|
||||
translation_key="energy_consumption_current_hour",
|
||||
value_fn=lambda estimate: estimate.consumption.current_hour_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
VRMForecastsSensorEntityDescription(
|
||||
key="energy_consumption_next_hour",
|
||||
translation_key="energy_consumption_next_hour",
|
||||
value_fn=lambda estimate: estimate.consumption.next_hour_total,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: VictronRemoteMonitoringConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Defer sensor setup to the shared sensor module."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
VRMForecastsSensorEntity(
|
||||
entry_id=entry.entry_id,
|
||||
coordinator=coordinator,
|
||||
description=entity_description,
|
||||
)
|
||||
for entity_description in SENSORS
|
||||
)
|
||||
|
||||
|
||||
class VRMForecastsSensorEntity(
|
||||
CoordinatorEntity[VictronRemoteMonitoringDataUpdateCoordinator], SensorEntity
|
||||
):
|
||||
"""Defines a VRM Solar Forecast sensor."""
|
||||
|
||||
entity_description: VRMForecastsSensorEntityDescription
|
||||
_attr_has_entity_name = True
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
entry_id: str,
|
||||
coordinator: VictronRemoteMonitoringDataUpdateCoordinator,
|
||||
description: VRMForecastsSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize VRM Solar Forecast sensor."""
|
||||
super().__init__(coordinator=coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.site_id}|{description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, str(coordinator.data.site_id))},
|
||||
manufacturer="Victron Energy",
|
||||
model=f"VRM - {coordinator.data.site_id}",
|
||||
name="Victron Remote Monitoring",
|
||||
configuration_url="https://vrm.victronenergy.com",
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> datetime | StateType:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
@@ -1,102 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Enter your VRM API access token. We will then fetch your available sites.",
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_token": "The API access token for your VRM account"
|
||||
}
|
||||
},
|
||||
"select_site": {
|
||||
"description": "Select the VRM site",
|
||||
"data": {
|
||||
"site_id": "VRM site"
|
||||
},
|
||||
"data_description": {
|
||||
"site_id": "Select one of your VRM sites"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "Your existing token is no longer valid. Please enter a new VRM API access token to reauthenticate.",
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_token": "The new API access token for your VRM account"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"no_sites": "No sites found for this account",
|
||||
"site_not_found": "Site ID not found. Please check the ID and try again.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"energy_production_estimate_yesterday": {
|
||||
"name": "Estimated energy production - Yesterday"
|
||||
},
|
||||
"energy_production_estimate_today": {
|
||||
"name": "Estimated energy production - Today"
|
||||
},
|
||||
"energy_production_estimate_today_remaining": {
|
||||
"name": "Estimated energy production - Today remaining"
|
||||
},
|
||||
"energy_production_estimate_tomorrow": {
|
||||
"name": "Estimated energy production - Tomorrow"
|
||||
},
|
||||
"power_highest_peak_time_yesterday": {
|
||||
"name": "Highest peak time - Yesterday"
|
||||
},
|
||||
"power_highest_peak_time_today": {
|
||||
"name": "Highest peak time - Today"
|
||||
},
|
||||
"power_highest_peak_time_tomorrow": {
|
||||
"name": "Highest peak time - Tomorrow"
|
||||
},
|
||||
"energy_production_current_hour": {
|
||||
"name": "Estimated energy production - Current hour"
|
||||
},
|
||||
"energy_production_next_hour": {
|
||||
"name": "Estimated energy production - Next hour"
|
||||
},
|
||||
"energy_consumption_estimate_yesterday": {
|
||||
"name": "Estimated energy consumption - Yesterday"
|
||||
},
|
||||
"energy_consumption_estimate_today": {
|
||||
"name": "Estimated energy consumption - Today"
|
||||
},
|
||||
"energy_consumption_estimate_today_remaining": {
|
||||
"name": "Estimated energy consumption - Today remaining"
|
||||
},
|
||||
"energy_consumption_estimate_tomorrow": {
|
||||
"name": "Estimated energy consumption - Tomorrow"
|
||||
},
|
||||
"consumption_highest_peak_time_yesterday": {
|
||||
"name": "Highest consumption peak time - Yesterday"
|
||||
},
|
||||
"consumption_highest_peak_time_today": {
|
||||
"name": "Highest consumption peak time - Today"
|
||||
},
|
||||
"consumption_highest_peak_time_tomorrow": {
|
||||
"name": "Highest consumption peak time - Tomorrow"
|
||||
},
|
||||
"energy_consumption_current_hour": {
|
||||
"name": "Estimated energy consumption - Current hour"
|
||||
},
|
||||
"energy_consumption_next_hour": {
|
||||
"name": "Estimated energy consumption - Next hour"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -4,8 +4,9 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from volvocarsapi.api import VolvoCarsApi
|
||||
from volvocarsapi.models import VolvoApiException, VolvoAuthException, VolvoCarsVehicle
|
||||
from volvocarsapi.models import VolvoAuthException, VolvoCarsVehicle
|
||||
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -68,22 +69,22 @@ async def _async_auth_and_create_api(
|
||||
oauth_session = OAuth2Session(hass, entry, implementation)
|
||||
web_session = async_get_clientsession(hass)
|
||||
auth = VolvoAuth(web_session, oauth_session)
|
||||
api = VolvoCarsApi(
|
||||
|
||||
try:
|
||||
await auth.async_get_access_token()
|
||||
except ClientResponseError as err:
|
||||
if err.status in (400, 401):
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
return VolvoCarsApi(
|
||||
web_session,
|
||||
auth,
|
||||
entry.data[CONF_API_KEY],
|
||||
entry.data[CONF_VIN],
|
||||
)
|
||||
|
||||
try:
|
||||
await api.async_get_access_token()
|
||||
except VolvoAuthException as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except VolvoApiException as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
return api
|
||||
|
||||
|
||||
async def _async_load_vehicle(api: VolvoCarsApi) -> VolvoCarsVehicle:
|
||||
try:
|
||||
|
@@ -1,16 +1,11 @@
|
||||
"""API for Volvo bound to Home Assistant OAuth."""
|
||||
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from volvocarsapi.auth import AccessTokenManager
|
||||
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
|
||||
from homeassistant.helpers.redact import async_redact_data
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_TO_REDACT = ["access_token", "id_token", "refresh_token"]
|
||||
|
||||
|
||||
class VolvoAuth(AccessTokenManager):
|
||||
@@ -23,20 +18,7 @@ class VolvoAuth(AccessTokenManager):
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Return a valid access token."""
|
||||
current_access_token = self._oauth_session.token["access_token"]
|
||||
current_refresh_token = self._oauth_session.token["refresh_token"]
|
||||
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
|
||||
_LOGGER.debug(
|
||||
"Token: %s", async_redact_data(self._oauth_session.token, _TO_REDACT)
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Token changed: access %s, refresh %s",
|
||||
current_access_token != self._oauth_session.token["access_token"],
|
||||
current_refresh_token != self._oauth_session.token["refresh_token"],
|
||||
)
|
||||
|
||||
return cast(str, self._oauth_session.token["access_token"])
|
||||
|
||||
|
||||
|
@@ -103,6 +103,17 @@ class AirConEntity(WhirlpoolEntity, ClimateEntity):
|
||||
"""Return the current humidity."""
|
||||
return self._appliance.get_current_humidity()
|
||||
|
||||
@property
|
||||
def target_humidity(self) -> int:
|
||||
"""Return the humidity we try to reach."""
|
||||
return self._appliance.get_humidity()
|
||||
|
||||
async def async_set_humidity(self, humidity: int) -> None:
|
||||
"""Set new target humidity."""
|
||||
AirConEntity._check_service_request(
|
||||
await self._appliance.set_humidity(humidity)
|
||||
)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return current operation ie. heat, cool, fan."""
|
||||
|
@@ -1,25 +1,19 @@
|
||||
"""Base entity for the Whirlpool integration."""
|
||||
|
||||
import logging
|
||||
|
||||
from whirlpool.appliance import Appliance
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WhirlpoolEntity(Entity):
|
||||
"""Base class for Whirlpool entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
_unavailable_logged: bool = False
|
||||
|
||||
def __init__(self, appliance: Appliance, unique_id_suffix: str = "") -> None:
|
||||
"""Initialize the entity."""
|
||||
@@ -35,26 +29,16 @@ class WhirlpoolEntity(Entity):
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register attribute updates callback."""
|
||||
self._appliance.register_attr_callback(self._async_attr_callback)
|
||||
self._appliance.register_attr_callback(self.async_write_ha_state)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unregister attribute updates callback."""
|
||||
self._appliance.unregister_attr_callback(self._async_attr_callback)
|
||||
self._appliance.unregister_attr_callback(self.async_write_ha_state)
|
||||
|
||||
@callback
|
||||
def _async_attr_callback(self) -> None:
|
||||
_LOGGER.debug("Attribute update for entity %s", self.entity_id)
|
||||
self._attr_available = self._appliance.get_online()
|
||||
|
||||
if not self._attr_available:
|
||||
if not self._unavailable_logged:
|
||||
_LOGGER.info("The entity %s is unavailable", self.entity_id)
|
||||
self._unavailable_logged = True
|
||||
elif self._unavailable_logged:
|
||||
_LOGGER.info("The entity %s is back online", self.entity_id)
|
||||
self._unavailable_logged = False
|
||||
|
||||
self.async_write_ha_state()
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self._appliance.get_online()
|
||||
|
||||
@staticmethod
|
||||
def _check_service_request(result: bool) -> None:
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["holidays"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["holidays==0.81"]
|
||||
"requirements": ["holidays==0.80"]
|
||||
}
|
||||
|
@@ -13,5 +13,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/yale",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["socketio", "engineio", "yalexs"],
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
|
||||
"requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
|
||||
}
|
||||
|
@@ -21,7 +21,7 @@
|
||||
"zha",
|
||||
"universal_silabs_flasher"
|
||||
],
|
||||
"requirements": ["zha==0.0.71"],
|
||||
"requirements": ["zha==0.0.70"],
|
||||
"usb": [
|
||||
{
|
||||
"vid": "10C4",
|
||||
|
@@ -8,7 +8,6 @@ from homeassistant.const import (
|
||||
ATTR_GPS_ACCURACY,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_CONDITION,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_ZONE,
|
||||
STATE_UNAVAILABLE,
|
||||
@@ -29,7 +28,6 @@ from . import in_zone
|
||||
_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**cv.CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "zone",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required("zone"): cv.entity_ids,
|
||||
# To support use_trigger_value in automation
|
||||
|
@@ -16,7 +16,6 @@ from homeassistant.const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_DOMAIN,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_OPTIONS,
|
||||
CONF_PLATFORM,
|
||||
CONF_TYPE,
|
||||
)
|
||||
@@ -435,13 +434,12 @@ async def async_attach_trigger(
|
||||
|
||||
if trigger_platform == VALUE_UPDATED_PLATFORM_TYPE:
|
||||
zwave_js_config = {
|
||||
CONF_OPTIONS: {
|
||||
CONF_DEVICE_ID: config[CONF_DEVICE_ID],
|
||||
},
|
||||
state.CONF_PLATFORM: trigger_platform,
|
||||
CONF_DEVICE_ID: config[CONF_DEVICE_ID],
|
||||
}
|
||||
copy_available_params(
|
||||
config,
|
||||
zwave_js_config[CONF_OPTIONS],
|
||||
zwave_js_config,
|
||||
[
|
||||
ATTR_COMMAND_CLASS,
|
||||
ATTR_PROPERTY,
|
||||
@@ -455,7 +453,7 @@ async def async_attach_trigger(
|
||||
hass, zwave_js_config
|
||||
)
|
||||
return await attach_value_updated_trigger(
|
||||
hass, zwave_js_config[CONF_OPTIONS], action, trigger_info
|
||||
hass, zwave_js_config, action, trigger_info
|
||||
)
|
||||
|
||||
raise HomeAssistantError(f"Unhandled trigger type {trigger_type}")
|
||||
|
@@ -15,7 +15,6 @@ from homeassistant.const import (
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
ATTR_DEVICE_ID,
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_OPTIONS,
|
||||
CONF_PLATFORM,
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
@@ -26,7 +25,6 @@ from homeassistant.helpers.trigger import (
|
||||
TriggerActionType,
|
||||
TriggerData,
|
||||
TriggerInfo,
|
||||
move_top_level_schema_fields_to_options,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -97,37 +95,55 @@ def validate_event_data(obj: dict) -> dict:
|
||||
return obj
|
||||
|
||||
|
||||
_OPTIONS_SCHEMA_DICT = {
|
||||
vol.Optional(ATTR_CONFIG_ENTRY_ID): str,
|
||||
vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_EVENT_SOURCE): vol.In(["controller", "driver", "node"]),
|
||||
vol.Required(ATTR_EVENT): cv.string,
|
||||
vol.Optional(ATTR_EVENT_DATA): dict,
|
||||
vol.Optional(ATTR_PARTIAL_DICT_MATCH, default=False): bool,
|
||||
}
|
||||
|
||||
_CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): vol.All(
|
||||
_OPTIONS_SCHEMA_DICT,
|
||||
validate_event_name,
|
||||
validate_event_data,
|
||||
vol.Any(
|
||||
validate_non_node_event_source,
|
||||
cv.has_at_least_one_key(ATTR_DEVICE_ID, ATTR_ENTITY_ID),
|
||||
),
|
||||
)
|
||||
}
|
||||
TRIGGER_SCHEMA = vol.All(
|
||||
cv.TRIGGER_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): PLATFORM_TYPE,
|
||||
vol.Optional(ATTR_CONFIG_ENTRY_ID): str,
|
||||
vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_EVENT_SOURCE): vol.In(["controller", "driver", "node"]),
|
||||
vol.Required(ATTR_EVENT): cv.string,
|
||||
vol.Optional(ATTR_EVENT_DATA): dict,
|
||||
vol.Optional(ATTR_PARTIAL_DICT_MATCH, default=False): bool,
|
||||
},
|
||||
),
|
||||
validate_event_name,
|
||||
validate_event_data,
|
||||
vol.Any(
|
||||
validate_non_node_event_source,
|
||||
cv.has_at_least_one_key(ATTR_DEVICE_ID, ATTR_ENTITY_ID),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_validate_trigger_config(
|
||||
hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
config = TRIGGER_SCHEMA(config)
|
||||
|
||||
if ATTR_CONFIG_ENTRY_ID in config:
|
||||
entry_id = config[ATTR_CONFIG_ENTRY_ID]
|
||||
if hass.config_entries.async_get_entry(entry_id) is None:
|
||||
raise vol.Invalid(f"Config entry '{entry_id}' not found")
|
||||
|
||||
if async_bypass_dynamic_config_validation(hass, config):
|
||||
return config
|
||||
|
||||
if config[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
|
||||
hass, config
|
||||
):
|
||||
raise vol.Invalid(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
class EventTrigger(Trigger):
|
||||
"""Z-Wave JS event trigger."""
|
||||
|
||||
_hass: HomeAssistant
|
||||
_options: ConfigType
|
||||
|
||||
_event_source: str
|
||||
_event_name: str
|
||||
_event_data_filter: dict
|
||||
@@ -137,43 +153,17 @@ class EventTrigger(Trigger):
|
||||
|
||||
_platform_type = PLATFORM_TYPE
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
config = move_top_level_schema_fields_to_options(config, _OPTIONS_SCHEMA_DICT)
|
||||
return await super().async_validate_complete_config(hass, config)
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
self._config = config
|
||||
self._hass = hass
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
config = _CONFIG_SCHEMA(config)
|
||||
options = config[CONF_OPTIONS]
|
||||
|
||||
if ATTR_CONFIG_ENTRY_ID in options:
|
||||
entry_id = options[ATTR_CONFIG_ENTRY_ID]
|
||||
if hass.config_entries.async_get_entry(entry_id) is None:
|
||||
raise vol.Invalid(f"Config entry '{entry_id}' not found")
|
||||
|
||||
if async_bypass_dynamic_config_validation(hass, options):
|
||||
return config
|
||||
|
||||
if options[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
|
||||
hass, options
|
||||
):
|
||||
raise vol.Invalid(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
self._hass = hass
|
||||
self._options = config[CONF_OPTIONS]
|
||||
return await async_validate_trigger_config(hass, config)
|
||||
|
||||
async def async_attach(
|
||||
self,
|
||||
@@ -182,17 +172,17 @@ class EventTrigger(Trigger):
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
dev_reg = dr.async_get(self._hass)
|
||||
options = self._options
|
||||
if options[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
|
||||
self._hass, options, dev_reg=dev_reg
|
||||
config = self._config
|
||||
if config[ATTR_EVENT_SOURCE] == "node" and not async_get_nodes_from_targets(
|
||||
self._hass, config, dev_reg=dev_reg
|
||||
):
|
||||
raise ValueError(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
|
||||
self._event_source = options[ATTR_EVENT_SOURCE]
|
||||
self._event_name = options[ATTR_EVENT]
|
||||
self._event_data_filter = options.get(ATTR_EVENT_DATA, {})
|
||||
self._event_source = config[ATTR_EVENT_SOURCE]
|
||||
self._event_name = config[ATTR_EVENT]
|
||||
self._event_data_filter = config.get(ATTR_EVENT_DATA, {})
|
||||
self._job = HassJob(action)
|
||||
self._trigger_data = trigger_info["trigger_data"]
|
||||
self._unsubs: list[Callable] = []
|
||||
@@ -209,7 +199,7 @@ class EventTrigger(Trigger):
|
||||
if key not in event_data:
|
||||
return
|
||||
if (
|
||||
self._options[ATTR_PARTIAL_DICT_MATCH]
|
||||
self._config[ATTR_PARTIAL_DICT_MATCH]
|
||||
and isinstance(event_data[key], dict)
|
||||
and isinstance(val, dict)
|
||||
):
|
||||
@@ -265,10 +255,10 @@ class EventTrigger(Trigger):
|
||||
dev_reg = dr.async_get(self._hass)
|
||||
if not (
|
||||
nodes := async_get_nodes_from_targets(
|
||||
self._hass, self._options, dev_reg=dev_reg
|
||||
self._hass, self._config, dev_reg=dev_reg
|
||||
)
|
||||
):
|
||||
entry_id = self._options[ATTR_CONFIG_ENTRY_ID]
|
||||
entry_id = self._config[ATTR_CONFIG_ENTRY_ID]
|
||||
entry = self._hass.config_entries.async_get_entry(entry_id)
|
||||
assert entry
|
||||
client = entry.runtime_data.client
|
||||
|
@@ -10,22 +10,11 @@ from zwave_js_server.const import CommandClass
|
||||
from zwave_js_server.model.driver import Driver
|
||||
from zwave_js_server.model.value import Value, get_value_id_str
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_DEVICE_ID,
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_OPTIONS,
|
||||
CONF_PLATFORM,
|
||||
MATCH_ALL,
|
||||
)
|
||||
from homeassistant.const import ATTR_DEVICE_ID, ATTR_ENTITY_ID, CONF_PLATFORM, MATCH_ALL
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
TriggerActionType,
|
||||
TriggerInfo,
|
||||
move_top_level_schema_fields_to_options,
|
||||
)
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from ..config_validation import VALUE_SCHEMA
|
||||
@@ -57,26 +46,27 @@ PLATFORM_TYPE = f"{DOMAIN}.{RELATIVE_PLATFORM_TYPE}"
|
||||
ATTR_FROM = "from"
|
||||
ATTR_TO = "to"
|
||||
|
||||
_OPTIONS_SCHEMA_DICT = {
|
||||
vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_COMMAND_CLASS): vol.In(
|
||||
{cc.value: cc.name for cc in CommandClass}
|
||||
TRIGGER_SCHEMA = vol.All(
|
||||
cv.TRIGGER_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): PLATFORM_TYPE,
|
||||
vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_COMMAND_CLASS): vol.In(
|
||||
{cc.value: cc.name for cc in CommandClass}
|
||||
),
|
||||
vol.Required(ATTR_PROPERTY): vol.Any(vol.Coerce(int), cv.string),
|
||||
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int),
|
||||
vol.Optional(ATTR_PROPERTY_KEY): vol.Any(vol.Coerce(int), cv.string),
|
||||
vol.Optional(ATTR_FROM, default=MATCH_ALL): vol.Any(
|
||||
VALUE_SCHEMA, [VALUE_SCHEMA]
|
||||
),
|
||||
vol.Optional(ATTR_TO, default=MATCH_ALL): vol.Any(
|
||||
VALUE_SCHEMA, [VALUE_SCHEMA]
|
||||
),
|
||||
},
|
||||
),
|
||||
vol.Required(ATTR_PROPERTY): vol.Any(vol.Coerce(int), cv.string),
|
||||
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int),
|
||||
vol.Optional(ATTR_PROPERTY_KEY): vol.Any(vol.Coerce(int), cv.string),
|
||||
vol.Optional(ATTR_FROM, default=MATCH_ALL): vol.Any(VALUE_SCHEMA, [VALUE_SCHEMA]),
|
||||
vol.Optional(ATTR_TO, default=MATCH_ALL): vol.Any(VALUE_SCHEMA, [VALUE_SCHEMA]),
|
||||
}
|
||||
|
||||
_CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): vol.All(
|
||||
_OPTIONS_SCHEMA_DICT,
|
||||
cv.has_at_least_one_key(ATTR_ENTITY_ID, ATTR_DEVICE_ID),
|
||||
),
|
||||
},
|
||||
cv.has_at_least_one_key(ATTR_ENTITY_ID, ATTR_DEVICE_ID),
|
||||
)
|
||||
|
||||
|
||||
@@ -84,13 +74,12 @@ async def async_validate_trigger_config(
|
||||
hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
config = _CONFIG_SCHEMA(config)
|
||||
options = config[CONF_OPTIONS]
|
||||
config = TRIGGER_SCHEMA(config)
|
||||
|
||||
if async_bypass_dynamic_config_validation(hass, options):
|
||||
if async_bypass_dynamic_config_validation(hass, config):
|
||||
return config
|
||||
|
||||
if not async_get_nodes_from_targets(hass, options):
|
||||
if not async_get_nodes_from_targets(hass, config):
|
||||
raise vol.Invalid(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
@@ -99,7 +88,7 @@ async def async_validate_trigger_config(
|
||||
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
options: ConfigType,
|
||||
config: ConfigType,
|
||||
action: TriggerActionType,
|
||||
trigger_info: TriggerInfo,
|
||||
*,
|
||||
@@ -107,17 +96,17 @@ async def async_attach_trigger(
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Listen for state changes based on configuration."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
if not async_get_nodes_from_targets(hass, options, dev_reg=dev_reg):
|
||||
if not async_get_nodes_from_targets(hass, config, dev_reg=dev_reg):
|
||||
raise ValueError(
|
||||
f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
|
||||
)
|
||||
|
||||
from_value = options[ATTR_FROM]
|
||||
to_value = options[ATTR_TO]
|
||||
command_class = options[ATTR_COMMAND_CLASS]
|
||||
property_ = options[ATTR_PROPERTY]
|
||||
endpoint = options.get(ATTR_ENDPOINT)
|
||||
property_key = options.get(ATTR_PROPERTY_KEY)
|
||||
from_value = config[ATTR_FROM]
|
||||
to_value = config[ATTR_TO]
|
||||
command_class = config[ATTR_COMMAND_CLASS]
|
||||
property_ = config[ATTR_PROPERTY]
|
||||
endpoint = config.get(ATTR_ENDPOINT)
|
||||
property_key = config.get(ATTR_PROPERTY_KEY)
|
||||
unsubs: list[Callable] = []
|
||||
job = HassJob(action)
|
||||
|
||||
@@ -185,7 +174,7 @@ async def async_attach_trigger(
|
||||
# Nodes list can come from different drivers and we will need to listen to
|
||||
# server connections for all of them.
|
||||
drivers: set[Driver] = set()
|
||||
for node in async_get_nodes_from_targets(hass, options, dev_reg=dev_reg):
|
||||
for node in async_get_nodes_from_targets(hass, config, dev_reg=dev_reg):
|
||||
driver = node.client.driver
|
||||
assert driver is not None # The node comes from the driver.
|
||||
drivers.add(driver)
|
||||
@@ -221,16 +210,10 @@ async def async_attach_trigger(
|
||||
class ValueUpdatedTrigger(Trigger):
|
||||
"""Z-Wave JS value updated trigger."""
|
||||
|
||||
_hass: HomeAssistant
|
||||
_options: ConfigType
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
config = move_top_level_schema_fields_to_options(config, _OPTIONS_SCHEMA_DICT)
|
||||
return await super().async_validate_complete_config(hass, config)
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
self._config = config
|
||||
self._hass = hass
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
@@ -239,11 +222,6 @@ class ValueUpdatedTrigger(Trigger):
|
||||
"""Validate config."""
|
||||
return await async_validate_trigger_config(hass, config)
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
self._hass = hass
|
||||
self._options = config[CONF_OPTIONS]
|
||||
|
||||
async def async_attach(
|
||||
self,
|
||||
action: TriggerActionType,
|
||||
@@ -251,5 +229,5 @@ class ValueUpdatedTrigger(Trigger):
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
return await async_attach_trigger(
|
||||
self._hass, self._options, action, trigger_info
|
||||
self._hass, self._config, action, trigger_info
|
||||
)
|
||||
|
@@ -186,7 +186,6 @@ CONF_MONITORED_VARIABLES: Final = "monitored_variables"
|
||||
CONF_NAME: Final = "name"
|
||||
CONF_OFFSET: Final = "offset"
|
||||
CONF_OPTIMISTIC: Final = "optimistic"
|
||||
CONF_OPTIONS: Final = "options"
|
||||
CONF_PACKAGES: Final = "packages"
|
||||
CONF_PARALLEL: Final = "parallel"
|
||||
CONF_PARAMS: Final = "params"
|
||||
|
1
homeassistant/generated/config_flows.py
generated
1
homeassistant/generated/config_flows.py
generated
@@ -707,7 +707,6 @@ FLOWS = {
|
||||
"version",
|
||||
"vesync",
|
||||
"vicare",
|
||||
"victron_remote_monitoring",
|
||||
"vilfo",
|
||||
"vizio",
|
||||
"vlc_telnet",
|
||||
|
@@ -7252,12 +7252,6 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"victron_remote_monitoring": {
|
||||
"name": "Victron Remote Monitoring",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"vilfo": {
|
||||
"name": "Vilfo Router",
|
||||
"integration_type": "hub",
|
||||
|
@@ -1516,16 +1516,21 @@ NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any(
|
||||
vol.All(str, entity_domain(["input_number", "number", "sensor", "zone"])),
|
||||
)
|
||||
|
||||
CONDITION_BASE_SCHEMA: VolDictType = {
|
||||
|
||||
_CONDITION_COMMON_SCHEMA: VolDictType = {
|
||||
vol.Optional(CONF_ALIAS): string,
|
||||
vol.Optional(CONF_ENABLED): vol.Any(boolean, template),
|
||||
}
|
||||
|
||||
CONDITION_BASE_SCHEMA: VolDictType = {
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): str,
|
||||
}
|
||||
|
||||
NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "numeric_state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids_or_uuids,
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
CONF_BELOW: NUMERIC_STATE_THRESHOLD_SCHEMA,
|
||||
@@ -1538,7 +1543,6 @@ NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
|
||||
|
||||
STATE_CONDITION_BASE_SCHEMA = {
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids_or_uuids,
|
||||
vol.Optional(CONF_MATCH, default=ENTITY_MATCH_ALL): vol.All(
|
||||
vol.Lower, vol.Any(ENTITY_MATCH_ALL, ENTITY_MATCH_ANY)
|
||||
@@ -1581,7 +1585,6 @@ def STATE_CONDITION_SCHEMA(value: Any) -> dict[str, Any]:
|
||||
TEMPLATE_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "template",
|
||||
vol.Required(CONF_VALUE_TEMPLATE): template,
|
||||
}
|
||||
)
|
||||
@@ -1590,7 +1593,6 @@ TIME_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "time",
|
||||
vol.Optional("before"): vol.Any(
|
||||
time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"]))
|
||||
),
|
||||
@@ -1606,7 +1608,6 @@ TIME_CONDITION_SCHEMA = vol.All(
|
||||
TRIGGER_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "trigger",
|
||||
vol.Required(CONF_ID): vol.All(ensure_list, [string]),
|
||||
}
|
||||
)
|
||||
@@ -1614,7 +1615,6 @@ TRIGGER_CONDITION_SCHEMA = vol.Schema(
|
||||
AND_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "and",
|
||||
vol.Required(CONF_CONDITIONS): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1625,7 +1625,7 @@ AND_CONDITION_SCHEMA = vol.Schema(
|
||||
|
||||
AND_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
vol.Required("and"): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1637,7 +1637,6 @@ AND_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
OR_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "or",
|
||||
vol.Required(CONF_CONDITIONS): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1648,7 +1647,7 @@ OR_CONDITION_SCHEMA = vol.Schema(
|
||||
|
||||
OR_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
vol.Required("or"): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1660,7 +1659,6 @@ OR_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
NOT_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "not",
|
||||
vol.Required(CONF_CONDITIONS): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1671,7 +1669,7 @@ NOT_CONDITION_SCHEMA = vol.Schema(
|
||||
|
||||
NOT_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
vol.Required("not"): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1683,7 +1681,6 @@ NOT_CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
DEVICE_CONDITION_BASE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "device",
|
||||
vol.Required(CONF_DEVICE_ID): str,
|
||||
vol.Required(CONF_DOMAIN): str,
|
||||
vol.Remove("metadata"): dict,
|
||||
@@ -1739,7 +1736,7 @@ dynamic_template_condition = vol.All(
|
||||
|
||||
CONDITION_SHORTHAND_SCHEMA = vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): vol.All(
|
||||
ensure_list,
|
||||
# pylint: disable-next=unnecessary-lambda
|
||||
@@ -1766,7 +1763,7 @@ BUILT_IN_CONDITIONS: ValueSchemas = {
|
||||
def _base_condition_validator(value: Any) -> Any:
|
||||
vol.Schema(
|
||||
{
|
||||
**CONDITION_BASE_SCHEMA,
|
||||
**_CONDITION_COMMON_SCHEMA,
|
||||
CONF_CONDITION: vol.All(str, vol.NotIn(BUILT_IN_CONDITIONS)),
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
@@ -1795,7 +1792,7 @@ CONDITIONS_SCHEMA = vol.All(ensure_list, [CONDITION_SCHEMA])
|
||||
dynamic_template_condition_action = vol.All(
|
||||
# Wrap a shorthand template condition action in a template condition
|
||||
vol.Schema(
|
||||
{**CONDITION_BASE_SCHEMA, vol.Required(CONF_CONDITION): dynamic_template}
|
||||
{**_CONDITION_COMMON_SCHEMA, vol.Required(CONF_CONDITION): dynamic_template}
|
||||
),
|
||||
lambda config: {
|
||||
**config,
|
||||
|
@@ -25,7 +25,6 @@ from homeassistant.const import (
|
||||
ATTR_ASSUMED_STATE,
|
||||
ATTR_ATTRIBUTION,
|
||||
ATTR_DEVICE_CLASS,
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_ENTITY_PICTURE,
|
||||
ATTR_FRIENDLY_NAME,
|
||||
ATTR_ICON,
|
||||
@@ -536,7 +535,6 @@ class Entity(
|
||||
_attr_device_info: DeviceInfo | None = None
|
||||
_attr_entity_category: EntityCategory | None
|
||||
_attr_has_entity_name: bool
|
||||
_attr_included_entities: list[str] | None
|
||||
_attr_entity_picture: str | None = None
|
||||
_attr_entity_registry_enabled_default: bool
|
||||
_attr_entity_registry_visible_default: bool
|
||||
@@ -767,16 +765,6 @@ class Entity(
|
||||
"""
|
||||
return self._attr_capability_attributes
|
||||
|
||||
@property
|
||||
def included_entities(self) -> list[str] | None:
|
||||
"""Return a list of entity IDs if the entity represents a group.
|
||||
|
||||
Included entities will be shown as members in the UI.
|
||||
"""
|
||||
if hasattr(self, "_attr_included_entities"):
|
||||
return self._attr_included_entities
|
||||
return None
|
||||
|
||||
def get_initial_entity_options(self) -> er.EntityOptionsType | None:
|
||||
"""Return initial entity options.
|
||||
|
||||
@@ -805,18 +793,9 @@ class Entity(
|
||||
Implemented by platform classes. Convention for attribute names
|
||||
is lowercase snake_case.
|
||||
"""
|
||||
entity_ids = (
|
||||
None
|
||||
if self.included_entities is None
|
||||
else {ATTR_ENTITY_ID: self.included_entities}
|
||||
)
|
||||
if hasattr(self, "_attr_extra_state_attributes"):
|
||||
return (
|
||||
self._attr_extra_state_attributes
|
||||
if entity_ids is None
|
||||
else self._attr_extra_state_attributes | entity_ids
|
||||
)
|
||||
return None or entity_ids
|
||||
return self._attr_extra_state_attributes
|
||||
return None
|
||||
|
||||
@cached_property
|
||||
def device_info(self) -> DeviceInfo | None:
|
||||
|
@@ -18,9 +18,11 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import (
|
||||
Event,
|
||||
HassJob,
|
||||
HassJobType,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
@@ -29,7 +31,14 @@ from homeassistant.loader import async_get_integration, bind_hass
|
||||
from homeassistant.setup import async_prepare_setup_platform
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from . import device_registry as dr, discovery, entity, entity_registry as er, service
|
||||
from . import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
discovery,
|
||||
entity,
|
||||
entity_registry as er,
|
||||
service,
|
||||
)
|
||||
from .entity_platform import EntityPlatform, async_calculate_suggested_object_id
|
||||
from .typing import ConfigType, DiscoveryInfoType, VolDictType, VolSchemaType
|
||||
|
||||
@@ -243,6 +252,43 @@ class EntityComponent[_EntityT: entity.Entity = entity.Entity]:
|
||||
self.hass, self.entities, service_call, expand_group
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_register_legacy_entity_service(
|
||||
self,
|
||||
name: str,
|
||||
schema: VolDictType | VolSchemaType,
|
||||
func: str | Callable[..., Any],
|
||||
required_features: list[int] | None = None,
|
||||
supports_response: SupportsResponse = SupportsResponse.NONE,
|
||||
) -> None:
|
||||
"""Register an entity service with a legacy response format."""
|
||||
if isinstance(schema, dict):
|
||||
schema = cv.make_entity_service_schema(schema)
|
||||
|
||||
service_func: str | HassJob[..., Any]
|
||||
service_func = func if isinstance(func, str) else HassJob(func)
|
||||
|
||||
async def handle_service(
|
||||
call: ServiceCall,
|
||||
) -> ServiceResponse:
|
||||
"""Handle the service."""
|
||||
|
||||
result = await service.entity_service_call(
|
||||
self.hass, self._entities, service_func, call, required_features
|
||||
)
|
||||
|
||||
if result:
|
||||
if len(result) > 1:
|
||||
raise HomeAssistantError(
|
||||
"Deprecated service call matched more than one entity"
|
||||
)
|
||||
return result.popitem()[1]
|
||||
return None
|
||||
|
||||
self.hass.services.async_register(
|
||||
self.domain, name, handle_service, schema, supports_response
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_register_entity_service(
|
||||
self,
|
||||
|
@@ -96,10 +96,10 @@ class TargetSelectorData:
|
||||
class SelectedEntities:
|
||||
"""Class to hold the selected entities."""
|
||||
|
||||
# Entity IDs of entities that were explicitly mentioned.
|
||||
# Entities that were explicitly mentioned.
|
||||
referenced: set[str] = dataclasses.field(default_factory=set)
|
||||
|
||||
# Entity IDs of entities that were referenced via device/area/floor/label ID.
|
||||
# Entities that were referenced via device/area/floor/label ID.
|
||||
# Should not trigger a warning when they don't exist.
|
||||
indirectly_referenced: set[str] = dataclasses.field(default_factory=set)
|
||||
|
||||
|
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from ast import literal_eval
|
||||
import asyncio
|
||||
import collections.abc
|
||||
from collections.abc import Callable, Generator, Iterable
|
||||
from collections.abc import Callable, Generator, Iterable, MutableSequence
|
||||
from contextlib import AbstractContextManager
|
||||
from contextvars import ContextVar
|
||||
from copy import deepcopy
|
||||
@@ -2245,6 +2245,31 @@ def is_number(value):
|
||||
return True
|
||||
|
||||
|
||||
def _is_list(value: Any) -> bool:
|
||||
"""Return whether a value is a list."""
|
||||
return isinstance(value, list)
|
||||
|
||||
|
||||
def _is_set(value: Any) -> bool:
|
||||
"""Return whether a value is a set."""
|
||||
return isinstance(value, set)
|
||||
|
||||
|
||||
def _is_tuple(value: Any) -> bool:
|
||||
"""Return whether a value is a tuple."""
|
||||
return isinstance(value, tuple)
|
||||
|
||||
|
||||
def _to_set(value: Any) -> set[Any]:
|
||||
"""Convert value to set."""
|
||||
return set(value)
|
||||
|
||||
|
||||
def _to_tuple(value):
|
||||
"""Convert value to tuple."""
|
||||
return tuple(value)
|
||||
|
||||
|
||||
def _is_datetime(value: Any) -> bool:
|
||||
"""Return whether a value is a datetime."""
|
||||
return isinstance(value, datetime)
|
||||
@@ -2462,11 +2487,98 @@ def iif(
|
||||
return if_false
|
||||
|
||||
|
||||
def shuffle(*args: Any, seed: Any = None) -> MutableSequence[Any]:
|
||||
"""Shuffle a list, either with a seed or without."""
|
||||
if not args:
|
||||
raise TypeError("shuffle expected at least 1 argument, got 0")
|
||||
|
||||
# If first argument is iterable and more than 1 argument provided
|
||||
# but not a named seed, then use 2nd argument as seed.
|
||||
if isinstance(args[0], Iterable):
|
||||
items = list(args[0])
|
||||
if len(args) > 1 and seed is None:
|
||||
seed = args[1]
|
||||
elif len(args) == 1:
|
||||
raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
|
||||
else:
|
||||
items = list(args)
|
||||
|
||||
if seed:
|
||||
r = random.Random(seed)
|
||||
r.shuffle(items)
|
||||
else:
|
||||
random.shuffle(items)
|
||||
return items
|
||||
|
||||
|
||||
def typeof(value: Any) -> Any:
|
||||
"""Return the type of value passed to debug types."""
|
||||
return value.__class__.__name__
|
||||
|
||||
|
||||
def flatten(value: Iterable[Any], levels: int | None = None) -> list[Any]:
|
||||
"""Flattens list of lists."""
|
||||
if not isinstance(value, Iterable) or isinstance(value, str):
|
||||
raise TypeError(f"flatten expected a list, got {type(value).__name__}")
|
||||
|
||||
flattened: list[Any] = []
|
||||
for item in value:
|
||||
if isinstance(item, Iterable) and not isinstance(item, str):
|
||||
if levels is None:
|
||||
flattened.extend(flatten(item))
|
||||
elif levels >= 1:
|
||||
flattened.extend(flatten(item, levels=(levels - 1)))
|
||||
else:
|
||||
flattened.append(item)
|
||||
else:
|
||||
flattened.append(item)
|
||||
return flattened
|
||||
|
||||
|
||||
def intersect(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
|
||||
"""Return the common elements between two lists."""
|
||||
if not isinstance(value, Iterable) or isinstance(value, str):
|
||||
raise TypeError(f"intersect expected a list, got {type(value).__name__}")
|
||||
if not isinstance(other, Iterable) or isinstance(other, str):
|
||||
raise TypeError(f"intersect expected a list, got {type(other).__name__}")
|
||||
|
||||
return list(set(value) & set(other))
|
||||
|
||||
|
||||
def difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
|
||||
"""Return elements in first list that are not in second list."""
|
||||
if not isinstance(value, Iterable) or isinstance(value, str):
|
||||
raise TypeError(f"difference expected a list, got {type(value).__name__}")
|
||||
if not isinstance(other, Iterable) or isinstance(other, str):
|
||||
raise TypeError(f"difference expected a list, got {type(other).__name__}")
|
||||
|
||||
return list(set(value) - set(other))
|
||||
|
||||
|
||||
def union(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
|
||||
"""Return all unique elements from both lists combined."""
|
||||
if not isinstance(value, Iterable) or isinstance(value, str):
|
||||
raise TypeError(f"union expected a list, got {type(value).__name__}")
|
||||
if not isinstance(other, Iterable) or isinstance(other, str):
|
||||
raise TypeError(f"union expected a list, got {type(other).__name__}")
|
||||
|
||||
return list(set(value) | set(other))
|
||||
|
||||
|
||||
def symmetric_difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
|
||||
"""Return elements that are in either list but not in both."""
|
||||
if not isinstance(value, Iterable) or isinstance(value, str):
|
||||
raise TypeError(
|
||||
f"symmetric_difference expected a list, got {type(value).__name__}"
|
||||
)
|
||||
if not isinstance(other, Iterable) or isinstance(other, str):
|
||||
raise TypeError(
|
||||
f"symmetric_difference expected a list, got {type(other).__name__}"
|
||||
)
|
||||
|
||||
return list(set(value) ^ set(other))
|
||||
|
||||
|
||||
def combine(*args: Any, recursive: bool = False) -> dict[Any, Any]:
|
||||
"""Combine multiple dictionaries into one."""
|
||||
if not args:
|
||||
@@ -2648,15 +2760,11 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.add_extension("jinja2.ext.loopcontrols")
|
||||
self.add_extension("jinja2.ext.do")
|
||||
self.add_extension("homeassistant.helpers.template.extensions.Base64Extension")
|
||||
self.add_extension(
|
||||
"homeassistant.helpers.template.extensions.CollectionExtension"
|
||||
)
|
||||
self.add_extension("homeassistant.helpers.template.extensions.CryptoExtension")
|
||||
self.add_extension("homeassistant.helpers.template.extensions.MathExtension")
|
||||
self.add_extension("homeassistant.helpers.template.extensions.RegexExtension")
|
||||
self.add_extension("homeassistant.helpers.template.extensions.StringExtension")
|
||||
|
||||
self.globals["apply"] = apply
|
||||
self.globals["as_datetime"] = as_datetime
|
||||
self.globals["as_function"] = as_function
|
||||
self.globals["as_local"] = dt_util.as_local
|
||||
@@ -2664,15 +2772,23 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.globals["as_timestamp"] = forgiving_as_timestamp
|
||||
self.globals["bool"] = forgiving_boolean
|
||||
self.globals["combine"] = combine
|
||||
self.globals["difference"] = difference
|
||||
self.globals["flatten"] = flatten
|
||||
self.globals["float"] = forgiving_float
|
||||
self.globals["iif"] = iif
|
||||
self.globals["int"] = forgiving_int
|
||||
self.globals["intersect"] = intersect
|
||||
self.globals["is_number"] = is_number
|
||||
self.globals["merge_response"] = merge_response
|
||||
self.globals["pack"] = struct_pack
|
||||
self.globals["set"] = _to_set
|
||||
self.globals["shuffle"] = shuffle
|
||||
self.globals["strptime"] = strptime
|
||||
self.globals["symmetric_difference"] = symmetric_difference
|
||||
self.globals["timedelta"] = timedelta
|
||||
self.globals["tuple"] = _to_tuple
|
||||
self.globals["typeof"] = typeof
|
||||
self.globals["union"] = union
|
||||
self.globals["unpack"] = struct_unpack
|
||||
self.globals["version"] = version
|
||||
self.globals["zip"] = zip
|
||||
@@ -2687,11 +2803,14 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.filters["bool"] = forgiving_boolean
|
||||
self.filters["combine"] = combine
|
||||
self.filters["contains"] = contains
|
||||
self.filters["difference"] = difference
|
||||
self.filters["flatten"] = flatten
|
||||
self.filters["float"] = forgiving_float_filter
|
||||
self.filters["from_json"] = from_json
|
||||
self.filters["from_hex"] = from_hex
|
||||
self.filters["iif"] = iif
|
||||
self.filters["int"] = forgiving_int_filter
|
||||
self.filters["intersect"] = intersect
|
||||
self.filters["is_defined"] = fail_when_undefined
|
||||
self.filters["is_number"] = is_number
|
||||
self.filters["multiply"] = multiply
|
||||
@@ -2699,11 +2818,14 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.filters["pack"] = struct_pack
|
||||
self.filters["random"] = random_every_time
|
||||
self.filters["round"] = forgiving_round
|
||||
self.filters["shuffle"] = shuffle
|
||||
self.filters["symmetric_difference"] = symmetric_difference
|
||||
self.filters["timestamp_custom"] = timestamp_custom
|
||||
self.filters["timestamp_local"] = timestamp_local
|
||||
self.filters["timestamp_utc"] = timestamp_utc
|
||||
self.filters["to_json"] = to_json
|
||||
self.filters["typeof"] = typeof
|
||||
self.filters["union"] = union
|
||||
self.filters["unpack"] = struct_unpack
|
||||
self.filters["version"] = version
|
||||
|
||||
@@ -2711,7 +2833,10 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.tests["contains"] = contains
|
||||
self.tests["datetime"] = _is_datetime
|
||||
self.tests["is_number"] = is_number
|
||||
self.tests["list"] = _is_list
|
||||
self.tests["set"] = _is_set
|
||||
self.tests["string_like"] = _is_string_like
|
||||
self.tests["tuple"] = _is_tuple
|
||||
|
||||
if hass is None:
|
||||
return
|
||||
|
@@ -1,7 +1,6 @@
|
||||
"""Home Assistant template extensions."""
|
||||
|
||||
from .base64 import Base64Extension
|
||||
from .collection import CollectionExtension
|
||||
from .crypto import CryptoExtension
|
||||
from .math import MathExtension
|
||||
from .regex import RegexExtension
|
||||
@@ -9,7 +8,6 @@ from .string import StringExtension
|
||||
|
||||
__all__ = [
|
||||
"Base64Extension",
|
||||
"CollectionExtension",
|
||||
"CryptoExtension",
|
||||
"MathExtension",
|
||||
"RegexExtension",
|
||||
|
@@ -1,191 +0,0 @@
|
||||
"""Collection and data structure functions for Home Assistant templates."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable, MutableSequence
|
||||
import random
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from .base import BaseTemplateExtension, TemplateFunction
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.helpers.template import TemplateEnvironment
|
||||
|
||||
|
||||
class CollectionExtension(BaseTemplateExtension):
    """Extension for collection and data structure operations.

    Registers template globals/filters (flatten, shuffle, intersect,
    difference, union, symmetric_difference, set, tuple) and template
    tests (list, set, tuple) on the given environment.
    """

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Initialize the collection extension."""
        super().__init__(
            environment,
            functions=[
                TemplateFunction(
                    "flatten",
                    self.flatten,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "shuffle",
                    self.shuffle,
                    as_global=True,
                    as_filter=True,
                ),
                # Set operations
                TemplateFunction(
                    "intersect",
                    self.intersect,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "difference",
                    self.difference,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "union",
                    self.union,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "symmetric_difference",
                    self.symmetric_difference,
                    as_global=True,
                    as_filter=True,
                ),
                # Type conversion functions
                TemplateFunction(
                    "set",
                    self.to_set,
                    as_global=True,
                ),
                TemplateFunction(
                    "tuple",
                    self.to_tuple,
                    as_global=True,
                ),
                # Type checking functions (tests)
                TemplateFunction(
                    "list",
                    self.is_list,
                    as_test=True,
                ),
                TemplateFunction(
                    "set",
                    self.is_set,
                    as_test=True,
                ),
                TemplateFunction(
                    "tuple",
                    self.is_tuple,
                    as_test=True,
                ),
            ],
        )

    @staticmethod
    def _raise_if_not_iterable(value: Any, function_name: str) -> None:
        """Raise TypeError unless value is a non-string iterable.

        Strings are iterable but are treated as scalars by the collection
        helpers, so they are rejected as well. The error message matches the
        pattern previously duplicated in each helper.
        """
        if not isinstance(value, Iterable) or isinstance(value, str):
            raise TypeError(
                f"{function_name} expected a list, got {type(value).__name__}"
            )

    def flatten(self, value: Iterable[Any], levels: int | None = None) -> list[Any]:
        """Flatten list of lists.

        If levels is given, flatten only that many levels deep; otherwise
        flatten recursively until no nested non-string iterables remain.

        Raises TypeError if value is not a non-string iterable.
        """
        self._raise_if_not_iterable(value, "flatten")

        flattened: list[Any] = []
        for item in value:
            if isinstance(item, Iterable) and not isinstance(item, str):
                if levels is None:
                    # Unlimited depth: recurse without a level budget.
                    flattened.extend(self.flatten(item))
                elif levels >= 1:
                    # Spend one level of the remaining budget.
                    flattened.extend(self.flatten(item, levels=(levels - 1)))
                else:
                    # Budget exhausted: keep the nested iterable as-is.
                    flattened.append(item)
            else:
                flattened.append(item)
        return flattened

    def shuffle(self, *args: Any, seed: Any = None) -> MutableSequence[Any]:
        """Shuffle a list, either with a seed or without.

        Accepts either a single iterable (optionally followed by a positional
        seed) or multiple scalar arguments that are shuffled as a list.

        Raises TypeError when called without arguments or with a single
        non-iterable argument.
        """
        if not args:
            raise TypeError("shuffle expected at least 1 argument, got 0")

        # If first argument is iterable and more than 1 argument provided
        # but not a named seed, then use 2nd argument as seed.
        if isinstance(args[0], Iterable) and not isinstance(args[0], str):
            items = list(args[0])
            if len(args) > 1 and seed is None:
                seed = args[1]
        elif len(args) == 1:
            raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
        else:
            # Multiple scalar arguments: shuffle the arguments themselves.
            items = list(args)

        # NOTE: a falsy seed (0, "", None) intentionally falls through to the
        # unseeded path, preserving existing template behavior.
        if seed:
            r = random.Random(seed)
            r.shuffle(items)
        else:
            random.shuffle(items)
        return items

    def intersect(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return the common elements between two lists.

        Raises TypeError if either argument is not a non-string iterable.
        """
        self._raise_if_not_iterable(value, "intersect")
        self._raise_if_not_iterable(other, "intersect")

        return list(set(value) & set(other))

    def difference(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return elements in first list that are not in second list.

        Raises TypeError if either argument is not a non-string iterable.
        """
        self._raise_if_not_iterable(value, "difference")
        self._raise_if_not_iterable(other, "difference")

        return list(set(value) - set(other))

    def union(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return all unique elements from both lists combined.

        Raises TypeError if either argument is not a non-string iterable.
        """
        self._raise_if_not_iterable(value, "union")
        self._raise_if_not_iterable(other, "union")

        return list(set(value) | set(other))

    def symmetric_difference(
        self, value: Iterable[Any], other: Iterable[Any]
    ) -> list[Any]:
        """Return elements that are in either list but not in both.

        Raises TypeError if either argument is not a non-string iterable.
        """
        self._raise_if_not_iterable(value, "symmetric_difference")
        self._raise_if_not_iterable(other, "symmetric_difference")

        return list(set(value) ^ set(other))

    def to_set(self, value: Any) -> set[Any]:
        """Convert value to set."""
        return set(value)

    def to_tuple(self, value: Any) -> tuple[Any, ...]:
        """Convert value to tuple."""
        return tuple(value)

    def is_list(self, value: Any) -> bool:
        """Return whether a value is a list."""
        return isinstance(value, list)

    def is_set(self, value: Any) -> bool:
        """Return whether a value is a set."""
        return isinstance(value, set)

    def is_tuple(self, value: Any) -> bool:
        """Return whether a value is a tuple."""
        return isinstance(value, tuple)
|
@@ -18,10 +18,8 @@ from homeassistant.const import (
|
||||
CONF_ALIAS,
|
||||
CONF_ENABLED,
|
||||
CONF_ID,
|
||||
CONF_OPTIONS,
|
||||
CONF_PLATFORM,
|
||||
CONF_SELECTOR,
|
||||
CONF_TARGET,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
@@ -76,17 +74,17 @@ TRIGGERS: HassKey[dict[str, str]] = HassKey("triggers")
|
||||
|
||||
# Basic schemas to sanity check the trigger descriptions,
|
||||
# full validation is done by hassfest.triggers
|
||||
_FIELD_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_FIELD_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SELECTOR): selector.validate_selector,
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_TRIGGER_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_TRIGGER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional("target"): TargetSelector.CONFIG_SCHEMA,
|
||||
vol.Optional("fields"): vol.Schema({str: _FIELD_DESCRIPTION_SCHEMA}),
|
||||
vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}),
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
@@ -99,10 +97,10 @@ def starts_with_dot(key: str) -> str:
|
||||
return key
|
||||
|
||||
|
||||
_TRIGGERS_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_TRIGGERS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Remove(vol.All(str, starts_with_dot)): object,
|
||||
cv.underscore_slug: vol.Any(None, _TRIGGER_DESCRIPTION_SCHEMA),
|
||||
cv.underscore_slug: vol.Any(None, _TRIGGER_SCHEMA),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -167,41 +165,11 @@ async def _register_trigger_platform(
|
||||
_LOGGER.exception("Error while notifying trigger platform listener")
|
||||
|
||||
|
||||
_TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_OPTIONS): object,
|
||||
vol.Optional(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class Trigger(abc.ABC):
|
||||
"""Trigger class."""
|
||||
|
||||
    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate complete config.

        The complete config includes fields that are generic to all triggers,
        such as the alias or the ID.
        This method should be overridden by triggers that need to migrate
        from the old-style config.
        """
        # Validate the generic, trigger-agnostic fields first.
        config = _TRIGGER_SCHEMA(config)

        # Extract the trigger-specific fields (options/target) so that only
        # they are passed to the subclass validation hook.
        specific_config: ConfigType = {}
        for key in (CONF_OPTIONS, CONF_TARGET):
            if key in config:
                specific_config[key] = config.pop(key)
        specific_config = await cls.async_validate_config(hass, specific_config)

        # Merge the (possibly rewritten) specific fields back into the
        # validated generic config.
        for key in (CONF_OPTIONS, CONF_TARGET):
            if key in specific_config:
                config[key] = specific_config[key]

        return config
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
@@ -210,9 +178,6 @@ class Trigger(abc.ABC):
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize trigger."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_attach(
|
||||
self,
|
||||
@@ -392,29 +357,6 @@ class PluggableAction:
|
||||
await task
|
||||
|
||||
|
||||
def move_top_level_schema_fields_to_options(
    config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
) -> ConfigType:
    """Move top-level fields to options.

    This function is used to help migrating old-style configs to new-style configs.
    If options is already present, the config is returned as-is.
    """
    # Already migrated (or authored new-style): leave untouched.
    if CONF_OPTIONS in config:
        return config

    migrated = dict(config)
    options = migrated.setdefault(CONF_OPTIONS, {})

    # Relocate every schema-declared top-level field under options.
    for marker in options_schema_dict:
        field = marker.schema
        if field in migrated:
            options[field] = migrated.pop(field)

    return migrated
|
||||
|
||||
|
||||
async def _async_get_trigger_platform(
|
||||
hass: HomeAssistant, trigger_key: str
|
||||
) -> tuple[str, TriggerProtocol]:
|
||||
@@ -448,7 +390,7 @@ async def async_validate_trigger_config(
|
||||
)
|
||||
if not (trigger := trigger_descriptors.get(relative_trigger_key)):
|
||||
raise vol.Invalid(f"Invalid trigger '{trigger_key}' specified")
|
||||
conf = await trigger.async_validate_complete_config(hass, conf)
|
||||
conf = await trigger.async_validate_config(hass, conf)
|
||||
elif hasattr(platform, "async_validate_trigger_config"):
|
||||
conf = await platform.async_validate_trigger_config(hass, conf)
|
||||
else:
|
||||
@@ -595,7 +537,7 @@ def _load_triggers_file(integration: Integration) -> dict[str, Any]:
|
||||
try:
|
||||
return cast(
|
||||
dict[str, Any],
|
||||
_TRIGGERS_DESCRIPTION_SCHEMA(
|
||||
_TRIGGERS_SCHEMA(
|
||||
load_yaml_dict(str(integration.file_path / "triggers.yaml"))
|
||||
),
|
||||
)
|
||||
|
@@ -168,22 +168,6 @@ class BaseUnitConverter:
|
||||
return (from_unit in cls._UNIT_INVERSES) != (to_unit in cls._UNIT_INVERSES)
|
||||
|
||||
|
||||
class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
    """Convert carbon monoxide ratio to mass per volume."""

    UNIT_CLASS = "carbon_monoxide"
    # Conversion factors with ppm as the 1.0 reference unit.
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_MILLION: 1,
        # concentration (mg/m3) = 0.0409 x concentration (ppm) x molecular weight
        # Carbon monoxide molecular weight: 28.01 g/mol
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 0.0409 * 28.01,
    }
    # Units this converter accepts for either side of a conversion.
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_MILLION,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
    }
|
||||
|
||||
|
||||
class DataRateConverter(BaseUnitConverter):
|
||||
"""Utility to convert data rate values."""
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user