mirror of https://github.com/home-assistant/core.git
synced 2025-12-18 13:58:01 +00:00

Compare commits: dev ... input_bool (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | ee0230f3b1 | |
| | 851fd467fe | |
| | d10148a175 | |

.github/copilot-instructions.md (vendored, 3 changed lines)
@@ -51,9 +51,6 @@ rules:
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review

## Python Requirements

- **Compatibility**: Python 3.13+
Dockerfile (generated, 2 changed lines)
@@ -24,7 +24,7 @@ ENV \
COPY rootfs /

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:f394f6329f5389a4c9a7fc54b09fdec9621bbb78bf7a672b973440bbdfb02241 /usr/local/bin/go2rtc /bin/go2rtc
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc

RUN \
# Verify go2rtc can be executed
@@ -130,6 +130,7 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
    "cover",
    "device_tracker",
    "fan",
    "input_boolean",
    "lawn_mower",
    "light",
    "media_player",
@@ -2,7 +2,6 @@
  "domain": "blackbird",
  "name": "Monoprice Blackbird Matrix Switch",
  "codeowners": [],
  "disabled": "This integration is disabled because it references pyserial-asyncio, which does blocking I/O in the asyncio loop and is not maintained.",
  "documentation": "https://www.home-assistant.io/integrations/blackbird",
  "iot_class": "local_polling",
  "loggers": ["pyblackbird"],
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
    "aioesphomeapi==43.3.0",
    "aioesphomeapi==43.0.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.4.0"
  ],
@@ -6,7 +6,7 @@ from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from pyfritzhome import Fritzhome, FritzhomeDevice, LoginError
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate, FritzhomeTrigger
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate
|
||||
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -27,7 +27,6 @@ class FritzboxCoordinatorData:
|
||||
|
||||
devices: dict[str, FritzhomeDevice]
|
||||
templates: dict[str, FritzhomeTemplate]
|
||||
triggers: dict[str, FritzhomeTrigger]
|
||||
supported_color_properties: dict[str, tuple[dict, list]]
|
||||
|
||||
|
||||
@@ -38,7 +37,6 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
configuration_url: str
|
||||
fritz: Fritzhome
|
||||
has_templates: bool
|
||||
has_triggers: bool
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: FritzboxConfigEntry) -> None:
|
||||
"""Initialize the Fritzbox Smarthome device coordinator."""
|
||||
@@ -52,9 +50,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
self.new_devices: set[str] = set()
|
||||
self.new_templates: set[str] = set()
|
||||
self.new_triggers: set[str] = set()
|
||||
|
||||
self.data = FritzboxCoordinatorData({}, {}, {}, {})
|
||||
self.data = FritzboxCoordinatorData({}, {}, {})
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
@@ -77,11 +74,6 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
)
|
||||
LOGGER.debug("enable smarthome templates: %s", self.has_templates)
|
||||
|
||||
self.has_triggers = await self.hass.async_add_executor_job(
|
||||
self.fritz.has_triggers
|
||||
)
|
||||
LOGGER.debug("enable smarthome triggers: %s", self.has_triggers)
|
||||
|
||||
self.configuration_url = self.fritz.get_prefixed_host()
|
||||
|
||||
await self.async_config_entry_first_refresh()
|
||||
@@ -100,7 +92,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
available_main_ains = [
|
||||
ain
|
||||
for ain, dev in (data.devices | data.templates | data.triggers).items()
|
||||
for ain, dev in data.devices.items() | data.templates.items()
|
||||
if dev.device_and_unit_id[1] is None
|
||||
]
|
||||
device_reg = dr.async_get(self.hass)
|
||||
@@ -120,9 +112,6 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
except RequestConnectionError as ex:
|
||||
raise UpdateFailed from ex
|
||||
except HTTPError:
|
||||
@@ -134,8 +123,6 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
devices = self.fritz.get_devices()
|
||||
device_data = {}
|
||||
@@ -169,20 +156,12 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
for template in templates:
|
||||
template_data[template.ain] = template
|
||||
|
||||
trigger_data = {}
|
||||
if self.has_triggers:
|
||||
triggers = self.fritz.get_triggers()
|
||||
for trigger in triggers:
|
||||
trigger_data[trigger.ain] = trigger
|
||||
|
||||
self.new_devices = device_data.keys() - self.data.devices.keys()
|
||||
self.new_templates = template_data.keys() - self.data.templates.keys()
|
||||
self.new_triggers = trigger_data.keys() - self.data.triggers.keys()
|
||||
|
||||
return FritzboxCoordinatorData(
|
||||
devices=device_data,
|
||||
templates=template_data,
|
||||
triggers=trigger_data,
|
||||
supported_color_properties=supported_color_properties,
|
||||
)
|
||||
|
||||
@@ -214,7 +193,6 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
if (
|
||||
self.data.devices.keys() - new_data.devices.keys()
|
||||
or self.data.templates.keys() - new_data.templates.keys()
|
||||
or self.data.triggers.keys() - new_data.triggers.keys()
|
||||
):
|
||||
self.cleanup_removed_devices(new_data)
|
||||
|
||||
|
||||
@@ -4,17 +4,14 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyfritzhome.devicetypes import FritzhomeTrigger
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FritzboxConfigEntry
|
||||
from .entity import FritzBoxDeviceEntity, FritzBoxEntity
|
||||
from .entity import FritzBoxDeviceEntity
|
||||
|
||||
# Coordinator handles data updates, so we can allow unlimited parallel updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -29,27 +26,21 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
@callback
|
||||
def _add_entities(
|
||||
devices: set[str] | None = None, triggers: set[str] | None = None
|
||||
) -> None:
|
||||
"""Add devices and triggers."""
|
||||
def _add_entities(devices: set[str] | None = None) -> None:
|
||||
"""Add devices."""
|
||||
if devices is None:
|
||||
devices = coordinator.new_devices
|
||||
if triggers is None:
|
||||
triggers = coordinator.new_triggers
|
||||
if not devices and not triggers:
|
||||
if not devices:
|
||||
return
|
||||
entities = [
|
||||
async_add_entities(
|
||||
FritzboxSwitch(coordinator, ain)
|
||||
for ain in devices
|
||||
if coordinator.data.devices[ain].has_switch
|
||||
] + [FritzboxTrigger(coordinator, ain) for ain in triggers]
|
||||
|
||||
async_add_entities(entities)
|
||||
)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_entities))
|
||||
|
||||
_add_entities(set(coordinator.data.devices), set(coordinator.data.triggers))
|
||||
_add_entities(set(coordinator.data.devices))
|
||||
|
||||
|
||||
class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
@@ -79,42 +70,3 @@ class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="manual_switching_disabled",
|
||||
)
|
||||
|
||||
|
||||
class FritzboxTrigger(FritzBoxEntity, SwitchEntity):
|
||||
"""The switch class for FRITZ!SmartHome triggers."""
|
||||
|
||||
@property
|
||||
def data(self) -> FritzhomeTrigger:
|
||||
"""Return the trigger data entity."""
|
||||
return self.coordinator.data.triggers[self.ain]
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device specific attributes."""
|
||||
return DeviceInfo(
|
||||
name=self.data.name,
|
||||
identifiers={(DOMAIN, self.ain)},
|
||||
configuration_url=self.coordinator.configuration_url,
|
||||
manufacturer="FRITZ!",
|
||||
model="SmartHome Routine",
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the trigger is active."""
|
||||
return self.data.active # type: ignore [no-any-return]
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Activate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_active, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Deactivate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_inactive, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
@@ -2,23 +2,15 @@

from __future__ import annotations

import logging
from typing import Any

from homeassistant.components.stream import (
    CONF_RTSP_TRANSPORT,
    CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_AUTHENTICATION, CONF_VERIFY_SSL, Platform
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er

from .const import CONF_FRAMERATE, CONF_LIMIT_REFETCH_TO_URL_CHANGE, SECTION_ADVANCED

DOMAIN = "generic"
PLATFORMS = [Platform.CAMERA]
_LOGGER = logging.getLogger(__name__)


async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:

@@ -55,38 +47,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate entry."""
    _LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)

    if entry.version > 2:
        # This means the user has downgraded from a future version
        return False

    if entry.version == 1:
        # Migrate to advanced section
        new_options = {**entry.options}
        advanced = new_options[SECTION_ADVANCED] = {
            CONF_FRAMERATE: new_options.pop(CONF_FRAMERATE),
            CONF_VERIFY_SSL: new_options.pop(CONF_VERIFY_SSL),
        }

        # migrate optional fields
        for key in (
            CONF_RTSP_TRANSPORT,
            CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
            CONF_AUTHENTICATION,
            CONF_LIMIT_REFETCH_TO_URL_CHANGE,
        ):
            if key in new_options:
                advanced[key] = new_options.pop(key)

        hass.config_entries.async_update_entry(entry, options=new_options, version=2)

    _LOGGER.debug(
        "Migration to version %s:%s successful", entry.version, entry.minor_version
    )

    return True
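The `async_migrate_entry` above moves a handful of camera options under the new `advanced` section when bumping the entry from version 1 to 2. A rough before/after sketch of the options dict (the URL and concrete values are placeholders, not taken from this change; key strings follow the generic `const.py` and `homeassistant.const`):

```python
# Illustrative only: assumed option shapes around the v1 -> v2 migration.
options_v1 = {
    "still_image_url": "http://camera.example/snapshot.jpg",  # placeholder
    "framerate": 2,                        # CONF_FRAMERATE
    "verify_ssl": True,                    # CONF_VERIFY_SSL
    "rtsp_transport": "tcp",               # optional keys move only if present
    "authentication": "basic",
    "limit_refetch_to_url_change": False,
}

options_v2 = {
    "still_image_url": "http://camera.example/snapshot.jpg",
    "advanced": {                          # SECTION_ADVANCED = "advanced"
        "framerate": 2,
        "verify_ssl": True,
        "rtsp_transport": "tcp",
        "authentication": "basic",
        "limit_refetch_to_url_change": False,
    },
}
```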
@@ -41,7 +41,6 @@ from .const import (
|
||||
CONF_STILL_IMAGE_URL,
|
||||
CONF_STREAM_SOURCE,
|
||||
GET_IMAGE_TIMEOUT,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -63,11 +62,9 @@ def generate_auth(device_info: Mapping[str, Any]) -> httpx.Auth | None:
|
||||
"""Generate httpx.Auth object from credentials."""
|
||||
username: str | None = device_info.get(CONF_USERNAME)
|
||||
password: str | None = device_info.get(CONF_PASSWORD)
|
||||
authentication = device_info.get(CONF_AUTHENTICATION)
|
||||
if username and password:
|
||||
if (
|
||||
device_info[SECTION_ADVANCED].get(CONF_AUTHENTICATION)
|
||||
== HTTP_DIGEST_AUTHENTICATION
|
||||
):
|
||||
if authentication == HTTP_DIGEST_AUTHENTICATION:
|
||||
return httpx.DigestAuth(username=username, password=password)
|
||||
return httpx.BasicAuth(username=username, password=password)
|
||||
return None
|
||||
@@ -102,16 +99,14 @@ class GenericCamera(Camera):
|
||||
if self._stream_source:
|
||||
self._stream_source = Template(self._stream_source, hass)
|
||||
self._attr_supported_features = CameraEntityFeature.STREAM
|
||||
self._limit_refetch = device_info[SECTION_ADVANCED].get(
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE, False
|
||||
)
|
||||
self._attr_frame_interval = 1 / device_info[SECTION_ADVANCED][CONF_FRAMERATE]
|
||||
self._limit_refetch = device_info.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False)
|
||||
self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE]
|
||||
self.content_type = device_info[CONF_CONTENT_TYPE]
|
||||
self.verify_ssl = device_info[SECTION_ADVANCED][CONF_VERIFY_SSL]
|
||||
if rtsp_transport := device_info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
|
||||
self.stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
|
||||
self.verify_ssl = device_info[CONF_VERIFY_SSL]
|
||||
if device_info.get(CONF_RTSP_TRANSPORT):
|
||||
self.stream_options[CONF_RTSP_TRANSPORT] = device_info[CONF_RTSP_TRANSPORT]
|
||||
self._auth = generate_auth(device_info)
|
||||
if device_info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
if device_info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
self.stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True
|
||||
|
||||
self._last_url = None
|
||||
|
||||
@@ -50,17 +50,10 @@ from homeassistant.const import (
|
||||
HTTP_DIGEST_AUTHENTICATION,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template as template_helper
|
||||
from homeassistant.helpers.entity_platform import PlatformData
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .camera import GenericCamera, generate_auth
|
||||
@@ -74,20 +67,17 @@ from .const import (
|
||||
DEFAULT_NAME,
|
||||
DOMAIN,
|
||||
GET_IMAGE_TIMEOUT,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DATA = {
|
||||
CONF_NAME: DEFAULT_NAME,
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_FRAMERATE: 2,
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_RTSP_TRANSPORT: "tcp",
|
||||
},
|
||||
}
|
||||
|
||||
SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"}
|
||||
@@ -104,47 +94,58 @@ class InvalidStreamException(HomeAssistantError):
|
||||
|
||||
|
||||
def build_schema(
|
||||
user_input: Mapping[str, Any],
|
||||
is_options_flow: bool = False,
|
||||
show_advanced_options: bool = False,
|
||||
) -> vol.Schema:
|
||||
"""Create schema for camera config setup."""
|
||||
rtsp_options = [
|
||||
SelectOptionDict(
|
||||
value=value,
|
||||
label=name,
|
||||
)
|
||||
for value, name in RTSP_TRANSPORTS.items()
|
||||
]
|
||||
|
||||
advanced_section = {
|
||||
vol.Required(CONF_FRAMERATE): vol.All(
|
||||
vol.Range(min=0, min_included=False), cv.positive_float
|
||||
),
|
||||
vol.Required(CONF_VERIFY_SSL): bool,
|
||||
vol.Optional(CONF_RTSP_TRANSPORT): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=rtsp_options,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_AUTHENTICATION): vol.In(
|
||||
[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
|
||||
),
|
||||
}
|
||||
spec = {
|
||||
vol.Optional(CONF_STREAM_SOURCE): str,
|
||||
vol.Optional(CONF_STILL_IMAGE_URL): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
vol.Required(SECTION_ADVANCED): section(
|
||||
vol.Schema(advanced_section), {"collapsed": True}
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_STILL_IMAGE_URL,
|
||||
description={"suggested_value": user_input.get(CONF_STILL_IMAGE_URL, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_STREAM_SOURCE,
|
||||
description={"suggested_value": user_input.get(CONF_STREAM_SOURCE, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_RTSP_TRANSPORT,
|
||||
description={"suggested_value": user_input.get(CONF_RTSP_TRANSPORT)},
|
||||
): vol.In(RTSP_TRANSPORTS),
|
||||
vol.Optional(
|
||||
CONF_AUTHENTICATION,
|
||||
description={"suggested_value": user_input.get(CONF_AUTHENTICATION)},
|
||||
): vol.In([HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]),
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
description={"suggested_value": user_input.get(CONF_USERNAME, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
description={"suggested_value": user_input.get(CONF_PASSWORD, "")},
|
||||
): str,
|
||||
vol.Required(
|
||||
CONF_FRAMERATE,
|
||||
description={"suggested_value": user_input.get(CONF_FRAMERATE, 2)},
|
||||
): vol.All(vol.Range(min=0, min_included=False), cv.positive_float),
|
||||
vol.Required(
|
||||
CONF_VERIFY_SSL, default=user_input.get(CONF_VERIFY_SSL, True)
|
||||
): bool,
|
||||
}
|
||||
if is_options_flow:
|
||||
advanced_section[vol.Optional(CONF_LIMIT_REFETCH_TO_URL_CHANGE)] = bool
|
||||
spec[
|
||||
vol.Required(
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
|
||||
default=user_input.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False),
|
||||
)
|
||||
] = bool
|
||||
if show_advanced_options:
|
||||
advanced_section[vol.Optional(CONF_USE_WALLCLOCK_AS_TIMESTAMPS)] = bool
|
||||
|
||||
spec[
|
||||
vol.Required(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
default=user_input.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False),
|
||||
)
|
||||
] = bool
|
||||
return vol.Schema(spec)
|
||||
|
||||
|
||||
@@ -186,7 +187,7 @@ async def async_test_still(
|
||||
return {CONF_STILL_IMAGE_URL: "malformed_url"}, None
|
||||
if not yarl_url.is_absolute():
|
||||
return {CONF_STILL_IMAGE_URL: "relative_url"}, None
|
||||
verify_ssl = info[SECTION_ADVANCED][CONF_VERIFY_SSL]
|
||||
verify_ssl = info[CONF_VERIFY_SSL]
|
||||
auth = generate_auth(info)
|
||||
try:
|
||||
async_client = get_async_client(hass, verify_ssl=verify_ssl)
|
||||
@@ -267,9 +268,9 @@ async def async_test_and_preview_stream(
|
||||
_LOGGER.warning("Problem rendering template %s: %s", stream_source, err)
|
||||
raise InvalidStreamException("template_error") from err
|
||||
stream_options: dict[str, str | bool | float] = {}
|
||||
if rtsp_transport := info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
|
||||
if rtsp_transport := info.get(CONF_RTSP_TRANSPORT):
|
||||
stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
|
||||
if info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
if info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True
|
||||
|
||||
try:
|
||||
@@ -325,7 +326,7 @@ def register_still_preview(hass: HomeAssistant) -> None:
|
||||
class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for generic IP camera."""
|
||||
|
||||
VERSION = 2
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Generic ConfigFlow."""
|
||||
@@ -380,7 +381,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
user_input = DEFAULT_DATA.copy()
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(build_schema(), user_input),
|
||||
data_schema=build_schema(user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -448,19 +449,13 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
self.preview_stream = None
|
||||
if not errors:
|
||||
data = {
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
),
|
||||
**user_input,
|
||||
CONF_CONTENT_TYPE: still_format
|
||||
or self.config_entry.options.get(CONF_CONTENT_TYPE),
|
||||
}
|
||||
if (
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS
|
||||
not in user_input[SECTION_ADVANCED]
|
||||
):
|
||||
data[SECTION_ADVANCED][CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = (
|
||||
self.config_entry.options[SECTION_ADVANCED].get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
)
|
||||
)
|
||||
self.user_input = data
|
||||
# temporary preview for user to check the image
|
||||
self.preview_image_settings = data
|
||||
@@ -469,13 +464,11 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
user_input = self.user_input
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
build_schema(
|
||||
data_schema=build_schema(
|
||||
user_input or self.config_entry.options,
|
||||
True,
|
||||
self.show_advanced_options,
|
||||
),
|
||||
user_input or self.config_entry.options,
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -9,4 +9,3 @@ CONF_STILL_IMAGE_URL = "still_image_url"
CONF_STREAM_SOURCE = "stream_source"
CONF_FRAMERATE = "framerate"
GET_IMAGE_TIMEOUT = 10
SECTION_ADVANCED = "advanced"
@@ -25,25 +25,18 @@
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"still_image_url": "Still image URL (e.g. http://...)",
|
||||
"stream_source": "Stream source URL (e.g. rtsp://...)",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"sections": {
|
||||
"advanced": {
|
||||
"data": {
|
||||
"authentication": "Authentication",
|
||||
"framerate": "Frame rate (Hz)",
|
||||
"limit_refetch_to_url_change": "Limit refetch to URL change",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"rtsp_transport": "RTSP transport protocol",
|
||||
"still_image_url": "Still image URL (e.g. http://...)",
|
||||
"stream_source": "Stream source URL (e.g. rtsp://...)",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"description": "Advanced settings are only needed for special cases. Leave them unchanged unless you know what you are doing.",
|
||||
"name": "Advanced settings"
|
||||
}
|
||||
}
|
||||
"description": "Enter the settings to connect to the camera."
|
||||
},
|
||||
"user_confirm": {
|
||||
"data": {
|
||||
@@ -77,27 +70,19 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"authentication": "[%key:component::generic::config::step::user::data::authentication%]",
|
||||
"framerate": "[%key:component::generic::config::step::user::data::framerate%]",
|
||||
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::data::limit_refetch_to_url_change%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"rtsp_transport": "[%key:component::generic::config::step::user::data::rtsp_transport%]",
|
||||
"still_image_url": "[%key:component::generic::config::step::user::data::still_image_url%]",
|
||||
"stream_source": "[%key:component::generic::config::step::user::data::stream_source%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"sections": {
|
||||
"advanced": {
|
||||
"data": {
|
||||
"authentication": "[%key:component::generic::config::step::user::sections::advanced::data::authentication%]",
|
||||
"framerate": "[%key:component::generic::config::step::user::sections::advanced::data::framerate%]",
|
||||
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::sections::advanced::data::limit_refetch_to_url_change%]",
|
||||
"rtsp_transport": "[%key:component::generic::config::step::user::sections::advanced::data::rtsp_transport%]",
|
||||
"use_wallclock_as_timestamps": "Use wallclock as timestamps",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
|
||||
},
|
||||
"description": "[%key:component::generic::config::step::user::sections::advanced::description%]",
|
||||
"name": "[%key:component::generic::config::step::user::sections::advanced::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"user_confirm": {
|
||||
|
||||
@@ -8,4 +8,4 @@ HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
# in script/hassfest/docker.py.
RECOMMENDED_VERSION = "1.9.13"
RECOMMENDED_VERSION = "1.9.12"
@@ -13,6 +13,6 @@
  "iot_class": "local_polling",
  "loggers": ["homewizard_energy"],
  "quality_scale": "platinum",
  "requirements": ["python-homewizard-energy==10.0.0"],
  "requirements": ["python-homewizard-energy==9.3.0"],
  "zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}
@@ -2,7 +2,12 @@

from __future__ import annotations

from homewizard_energy.models import Batteries
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from homewizard_energy import HomeWizardEnergy
from homewizard_energy.models import Batteries, CombinedModels as DeviceResponseEntry

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory
@@ -16,59 +21,69 @@ from .helpers import homewizard_exception_handler
PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class HomeWizardSelectEntityDescription(SelectEntityDescription):
    """Class describing HomeWizard select entities."""

    available_fn: Callable[[DeviceResponseEntry], bool]
    create_fn: Callable[[DeviceResponseEntry], bool]
    current_fn: Callable[[DeviceResponseEntry], str | None]
    set_fn: Callable[[HomeWizardEnergy, str], Awaitable[Any]]


DESCRIPTIONS = [
    HomeWizardSelectEntityDescription(
        key="battery_group_mode",
        translation_key="battery_group_mode",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        options=[Batteries.Mode.ZERO, Batteries.Mode.STANDBY, Batteries.Mode.TO_FULL],
        available_fn=lambda x: x.batteries is not None,
        create_fn=lambda x: x.batteries is not None,
        current_fn=lambda x: x.batteries.mode if x.batteries else None,
        set_fn=lambda api, mode: api.batteries(mode=Batteries.Mode(mode)),
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HomeWizardConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up HomeWizard select based on a config entry."""
    if entry.runtime_data.data.device.supports_batteries():
        async_add_entities(
            [
                HomeWizardBatteryModeSelectEntity(
                HomeWizardSelectEntity(
                    coordinator=entry.runtime_data,
                    description=description,
                )
            ]
            for description in DESCRIPTIONS
            if description.create_fn(entry.runtime_data.data)
        )


class HomeWizardBatteryModeSelectEntity(HomeWizardEntity, SelectEntity):
class HomeWizardSelectEntity(HomeWizardEntity, SelectEntity):
    """Defines a HomeWizard select entity."""

    entity_description: SelectEntityDescription
    entity_description: HomeWizardSelectEntityDescription

    def __init__(
        self,
        coordinator: HWEnergyDeviceUpdateCoordinator,
        description: HomeWizardSelectEntityDescription,
    ) -> None:
        """Initialize the switch."""
        super().__init__(coordinator)

        description = SelectEntityDescription(
            key="battery_group_mode",
            translation_key="battery_group_mode",
            entity_category=EntityCategory.CONFIG,
            entity_registry_enabled_default=False,
            options=[
                str(mode)
                for mode in (coordinator.data.device.supported_battery_modes() or [])
            ],
        )

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{description.key}"

    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""
        return (
            self.coordinator.data.batteries.mode
            if self.coordinator.data.batteries and self.coordinator.data.batteries.mode
            else None
        )
        return self.entity_description.current_fn(self.coordinator.data)

    @homewizard_exception_handler
    async def async_select_option(self, option: str) -> None:
        """Change the selected option."""
        await self.coordinator.api.batteries(Batteries.Mode(option))
        await self.entity_description.set_fn(self.coordinator.api, option)
        await self.coordinator.async_request_refresh()
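The HomeWizard select platform above moves from one hard-coded battery-mode entity to a description-driven setup: each `HomeWizardSelectEntityDescription` carries the `available_fn` / `create_fn` / `current_fn` / `set_fn` callables the generic entity uses. A hypothetical extra description, only to illustrate the extension point (the key, options, and callables below are invented, not part of this change):

```python
# Hypothetical sketch only: how another select would be described without
# touching HomeWizardSelectEntity itself. Names below are invented.
HomeWizardSelectEntityDescription(
    key="example_mode",
    translation_key="example_mode",
    entity_category=EntityCategory.CONFIG,
    options=["option_a", "option_b"],
    available_fn=lambda data: data.batteries is not None,  # gate on the relevant data
    create_fn=lambda data: data.batteries is not None,     # only create when supported
    current_fn=lambda data: data.batteries.mode if data.batteries else None,
    set_fn=lambda api, option: api.batteries(mode=Batteries.Mode(option)),
)
```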
@@ -65,9 +65,7 @@
"state": {
  "standby": "Standby",
  "to_full": "Manual charge mode",
  "zero": "Zero mode",
  "zero_charge_only": "Zero mode (charge only)",
  "zero_discharge_only": "Zero mode (discharge only)"
  "zero": "Zero mode"
}
}
},
@@ -20,5 +20,13 @@
"turn_on": {
  "service": "mdi:toggle-switch"
}
},
"triggers": {
  "turned_off": {
    "trigger": "mdi:toggle-switch-off"
  },
  "turned_on": {
    "trigger": "mdi:toggle-switch"
  }
}
}
@@ -1,4 +1,8 @@
{
  "common": {
    "trigger_behavior_description": "The behavior of the targeted input booleans to trigger on.",
    "trigger_behavior_name": "Behavior"
  },
  "entity_component": {
    "_": {
      "name": "[%key:component::input_boolean::title%]",
@@ -17,6 +21,15 @@
      }
    }
  },
  "selector": {
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
      }
    }
  },
  "services": {
    "reload": {
      "description": "Reloads helpers from the YAML-configuration.",
@@ -35,5 +48,27 @@
      "name": "[%key:common::action::turn_on%]"
    }
  },
  "title": "Input boolean"
  "title": "Input boolean",
  "triggers": {
    "turned_off": {
      "description": "Triggers after one or more input booleans turn off.",
      "fields": {
        "behavior": {
          "description": "[%key:component::input_boolean::common::trigger_behavior_description%]",
          "name": "[%key:component::input_boolean::common::trigger_behavior_name%]"
        }
      },
      "name": "Input boolean turned off"
    },
    "turned_on": {
      "description": "Triggers after one or more input booleans turn on.",
      "fields": {
        "behavior": {
          "description": "[%key:component::input_boolean::common::trigger_behavior_description%]",
          "name": "[%key:component::input_boolean::common::trigger_behavior_name%]"
        }
      },
      "name": "Input boolean turned on"
    }
  }
}
homeassistant/components/input_boolean/trigger.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""Provides triggers for input booleans."""

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger

from . import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
    "turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for input booleans."""
    return TRIGGERS
homeassistant/components/input_boolean/triggers.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: input_boolean
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

turned_off: *trigger_common
turned_on: *trigger_common
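trigger.py and triggers.yaml above together expose `input_boolean.turned_on` and `input_boolean.turned_off` as target-based triggers with a required `behavior` field (first / last / any, default any). A rough sketch of how such a trigger could be used in an automation, assuming the generic target-style trigger syntax (the entity ID and message are placeholders):

```yaml
# Illustrative automation only; exact syntax follows the shared target-trigger format.
automation:
  - triggers:
      - trigger: input_boolean.turned_on
        target:
          entity_id: input_boolean.guest_mode   # placeholder helper
        behavior: any
    actions:
      - action: persistent_notification.create
        data:
          message: "Guest mode was switched on"
```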
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@dgomes"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/kmtronic",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pykmtronic"],
|
||||
"requirements": ["pykmtronic==0.3.0"]
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"codeowners": ["@OnFreund"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/kodi",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["jsonrpc_async", "jsonrpc_base", "jsonrpc_websocket", "pykodi"],
|
||||
"requirements": ["pykodi==0.2.7"],
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@stegm"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/kostal_plenticore",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["kostal"],
|
||||
"requirements": ["pykoplenti==1.3.0"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@eifinger"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/kraken",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["krakenex", "pykrakenapi"],
|
||||
"requirements": ["krakenex==2.2.2", "pykrakenapi==0.1.8"]
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/kulersky",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bleak", "pykulersky"],
|
||||
"requirements": ["pykulersky==0.5.8"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@IceBotYT"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["lacrosse_view"],
|
||||
"requirements": ["lacrosse-view==1.1.1"]
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["usb"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/landisgyr_heat_meter",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["ultraheat-api==0.5.7"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@joostlek"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/lastfm",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pylast"],
|
||||
"requirements": ["pylast==5.1.0"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@xLarry"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/laundrify",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["laundrify-aio==1.2.2"]
|
||||
}
|
||||
|
||||
@@ -19,8 +19,8 @@ from .const import CONF_DOMAIN_DATA
|
||||
from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
SCAN_INTERVAL = timedelta(minutes=10)
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
|
||||
@@ -36,7 +36,7 @@ from .const import (
|
||||
from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ from .const import (
|
||||
from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
|
||||
@@ -33,8 +33,8 @@ from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
BRIGHTNESS_SCALE = (1, 100)
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
SCAN_INTERVAL = timedelta(minutes=10)
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
|
||||
@@ -6,9 +6,8 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["http", "websocket_api"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/lcn",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pypck"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pypck==0.9.8", "lcn-frontend==0.2.7"]
|
||||
"requirements": ["pypck==0.9.7", "lcn-frontend==0.2.7"]
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ from .const import (
|
||||
from .entity import LcnEntity
|
||||
from .helpers import LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
|
||||
@@ -40,7 +40,7 @@ from .const import (
|
||||
from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ from .const import CONF_DOMAIN_DATA, CONF_OUTPUT, OUTPUT_PORTS, RELAY_PORTS, SET
|
||||
from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 2
|
||||
SCAN_INTERVAL = timedelta(minutes=10)
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_switch_entities(
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/leaone",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["leaone-ble==0.3.0"]
|
||||
}
|
||||
|
||||
@@ -34,7 +34,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/led_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["bluetooth-data-tools==1.28.4", "led-ble==1.1.7"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/lg_soundbar",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["temescal"],
|
||||
"requirements": ["temescal==0.5"]
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -242,7 +241,6 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
|
||||
# If device is off, turn on first.
|
||||
if not self.data.is_on:
|
||||
await self.async_turn_on()
|
||||
await asyncio.sleep(2)
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_set_hvac_mode: %s",
|
||||
@@ -326,11 +324,10 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
|
||||
# If device is off, turn on first.
|
||||
if not self.data.is_on:
|
||||
await self.async_turn_on()
|
||||
await asyncio.sleep(2)
|
||||
|
||||
if hvac_mode and hvac_mode != self.hvac_mode:
|
||||
await self.async_set_hvac_mode(HVACMode(hvac_mode))
|
||||
await asyncio.sleep(2)
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_set_temperature: %s",
|
||||
self.coordinator.device_name,
|
||||
|
||||
@@ -3,13 +3,8 @@
  "name": "LG ThinQ",
  "codeowners": ["@LG-ThinQ-Integration"],
  "config_flow": true,
  "dhcp": [
    {
      "macaddress": "34E6E6*"
    }
  ],
  "dhcp": [{ "macaddress": "34E6E6*" }],
  "documentation": "https://www.home-assistant.io/integrations/lg_thinq",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["thinqconnect"],
  "requirements": ["thinqconnect==1.0.9"]
@@ -49,7 +49,6 @@
|
||||
"LIFX Z"
|
||||
]
|
||||
},
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiolifx", "aiolifx_effects", "bitstring"],
|
||||
"requirements": [
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@StefanIacobLivisi", "@planbnet"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/livisi",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["livisi==0.0.25"]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/loqed",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["loqedAPI==2.1.10"],
|
||||
"zeroconf": [
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@majuss", "@suaveolent"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/lupusec",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["lupupy"],
|
||||
"requirements": ["lupupy==0.3.2"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@cdheiser", "@wilburCForce"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/lutron",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pylutron"],
|
||||
"requirements": ["pylutron==0.2.18"],
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/lyric",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aiolyric"],
|
||||
"requirements": ["aiolyric==2.0.2"]
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/mailgun",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymailgunner"],
|
||||
"requirements": ["pymailgunner==1.4"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@Sotolotl", "@emontnemery"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/meater",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["meater-python==0.0.8"]
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/medcom_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["medcom-ble==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/melnor",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["melnor-bluetooth==0.0.25"]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "domain": "meteo_france",
  "name": "M\u00e9t\u00e9o-France",
  "name": "Météo-France",
  "codeowners": ["@hacf-fr", "@oncleben31", "@Quentame"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/meteo_france",
@@ -88,7 +88,6 @@ UNSUPPORTED_EXTENDED_CACHE_RETENTION_MODELS: list[str] = [
    "o4",
    "gpt-3.5",
    "gpt-4-turbo",
    "gpt-4o",
    "gpt-5-mini",
    "gpt-5-nano",
]
@@ -37,12 +37,10 @@ from .const import (
|
||||
PLATFORMS,
|
||||
)
|
||||
from .coordinator import (
|
||||
RoborockB01Q7UpdateCoordinator,
|
||||
RoborockConfigEntry,
|
||||
RoborockCoordinators,
|
||||
RoborockDataUpdateCoordinator,
|
||||
RoborockDataUpdateCoordinatorA01,
|
||||
RoborockDataUpdateCoordinatorB01,
|
||||
RoborockWashingMachineUpdateCoordinator,
|
||||
RoborockWetDryVacUpdateCoordinator,
|
||||
)
|
||||
@@ -133,18 +131,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
for coord in coordinators
|
||||
if isinstance(coord, RoborockDataUpdateCoordinatorA01)
|
||||
]
|
||||
b01_coords = [
|
||||
coord
|
||||
for coord in coordinators
|
||||
if isinstance(coord, RoborockDataUpdateCoordinatorB01)
|
||||
]
|
||||
if len(v1_coords) + len(a01_coords) + len(b01_coords) == 0:
|
||||
if len(v1_coords) + len(a01_coords) == 0:
|
||||
raise ConfigEntryNotReady(
|
||||
"No devices were able to successfully setup",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_coordinators",
|
||||
)
|
||||
entry.runtime_data = RoborockCoordinators(v1_coords, a01_coords, b01_coords)
|
||||
entry.runtime_data = RoborockCoordinators(v1_coords, a01_coords)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -215,17 +208,12 @@ def build_setup_functions(
|
||||
Coroutine[
|
||||
Any,
|
||||
Any,
|
||||
RoborockDataUpdateCoordinator
|
||||
| RoborockDataUpdateCoordinatorA01
|
||||
| RoborockDataUpdateCoordinatorB01
|
||||
| None,
|
||||
RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01 | None,
|
||||
]
|
||||
]:
|
||||
"""Create a list of setup functions that can later be called asynchronously."""
|
||||
coordinators: list[
|
||||
RoborockDataUpdateCoordinator
|
||||
| RoborockDataUpdateCoordinatorA01
|
||||
| RoborockDataUpdateCoordinatorB01
|
||||
RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01
|
||||
] = []
|
||||
for device in devices:
|
||||
_LOGGER.debug("Creating device %s: %s", device.name, device)
|
||||
@@ -241,12 +229,6 @@ def build_setup_functions(
|
||||
coordinators.append(
|
||||
RoborockWashingMachineUpdateCoordinator(hass, entry, device, device.zeo)
|
||||
)
|
||||
elif device.b01_q7_properties is not None:
|
||||
coordinators.append(
|
||||
RoborockB01Q7UpdateCoordinator(
|
||||
hass, entry, device, device.b01_q7_properties
|
||||
)
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning(
|
||||
"Not adding device %s because its protocol version %s or category %s is not supported",
|
||||
@@ -259,15 +241,8 @@ def build_setup_functions(
|
||||
|
||||
|
||||
async def setup_coordinator(
|
||||
coordinator: RoborockDataUpdateCoordinator
|
||||
| RoborockDataUpdateCoordinatorA01
|
||||
| RoborockDataUpdateCoordinatorB01,
|
||||
) -> (
|
||||
RoborockDataUpdateCoordinator
|
||||
| RoborockDataUpdateCoordinatorA01
|
||||
| RoborockDataUpdateCoordinatorB01
|
||||
| None
|
||||
):
|
||||
coordinator: RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01,
|
||||
) -> RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01 | None:
|
||||
"""Set up a single coordinator."""
|
||||
try:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
@@ -8,18 +8,12 @@ import logging
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from propcache.api import cached_property
|
||||
from roborock import B01Props
|
||||
from roborock.data import HomeDataScene
|
||||
from roborock.devices.device import RoborockDevice
|
||||
from roborock.devices.traits.a01 import DyadApi, ZeoApi
|
||||
from roborock.devices.traits.b01 import Q7PropertiesApi
|
||||
from roborock.devices.traits.v1 import PropertiesApi
|
||||
from roborock.exceptions import RoborockDeviceBusy, RoborockException
|
||||
from roborock.roborock_message import (
|
||||
RoborockB01Props,
|
||||
RoborockDyadDataProtocol,
|
||||
RoborockZeoProtocol,
|
||||
)
|
||||
from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_CONNECTIONS
|
||||
@@ -64,17 +58,12 @@ class RoborockCoordinators:
|
||||
|
||||
v1: list[RoborockDataUpdateCoordinator]
|
||||
a01: list[RoborockDataUpdateCoordinatorA01]
|
||||
b01: list[RoborockDataUpdateCoordinatorB01]
|
||||
|
||||
def values(
|
||||
self,
|
||||
) -> list[
|
||||
RoborockDataUpdateCoordinator
|
||||
| RoborockDataUpdateCoordinatorA01
|
||||
| RoborockDataUpdateCoordinatorB01
|
||||
]:
|
||||
) -> list[RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01]:
|
||||
"""Return all coordinators."""
|
||||
return self.v1 + self.a01 + self.b01
|
||||
return self.v1 + self.a01
|
||||
|
||||
|
||||
type RoborockConfigEntry = ConfigEntry[RoborockCoordinators]
|
||||
@@ -480,91 +469,3 @@ class RoborockWetDryVacUpdateCoordinator(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
|
||||
|
||||
class RoborockDataUpdateCoordinatorB01(DataUpdateCoordinator[B01Props]):
|
||||
"""Class to manage fetching data from the API for B01 devices."""
|
||||
|
||||
config_entry: RoborockConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: RoborockDevice,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=A01_UPDATE_INTERVAL,
|
||||
)
|
||||
self._device = device
|
||||
self.device_info = DeviceInfo(
|
||||
name=device.name,
|
||||
identifiers={(DOMAIN, device.duid)},
|
||||
manufacturer="Roborock",
|
||||
model=device.product.model,
|
||||
sw_version=device.device_info.fv,
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def duid(self) -> str:
|
||||
"""Get the unique id of the device as specified by Roborock."""
|
||||
return self._device.duid
|
||||
|
||||
@cached_property
|
||||
def duid_slug(self) -> str:
|
||||
"""Get the slug of the duid."""
|
||||
return slugify(self.duid)
|
||||
|
||||
@property
|
||||
def device(self) -> RoborockDevice:
|
||||
"""Get the RoborockDevice."""
|
||||
return self._device
|
||||
|
||||
|
||||
class RoborockB01Q7UpdateCoordinator(RoborockDataUpdateCoordinatorB01):
|
||||
"""Coordinator for B01 Q7 devices."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: RoborockDevice,
|
||||
api: Q7PropertiesApi,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(hass, config_entry, device)
|
||||
self.api = api
|
||||
self.request_protocols: list[RoborockB01Props] = [
|
||||
RoborockB01Props.STATUS,
|
||||
RoborockB01Props.MAIN_BRUSH,
|
||||
RoborockB01Props.SIDE_BRUSH,
|
||||
RoborockB01Props.DUST_BAG_USED,
|
||||
RoborockB01Props.MOP_LIFE,
|
||||
RoborockB01Props.MAIN_SENSOR,
|
||||
RoborockB01Props.CLEANING_TIME,
|
||||
RoborockB01Props.REAL_CLEAN_TIME,
|
||||
RoborockB01Props.HYPA,
|
||||
]
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> B01Props:
|
||||
try:
|
||||
data = await self.api.query_values(self.request_protocols)
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug("Failed to update Q7 data: %s", ex)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
if data is None:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
)
|
||||
return data
|
||||
|
||||
@@ -13,11 +13,7 @@ from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
RoborockDataUpdateCoordinator,
|
||||
RoborockDataUpdateCoordinatorA01,
|
||||
RoborockDataUpdateCoordinatorB01,
|
||||
)
|
||||
from .coordinator import RoborockDataUpdateCoordinator, RoborockDataUpdateCoordinatorA01
|
||||
|
||||
|
||||
class RoborockEntity(Entity):
|
||||
@@ -128,23 +124,3 @@ class RoborockCoordinatedEntityA01(
|
||||
)
|
||||
CoordinatorEntity.__init__(self, coordinator=coordinator)
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
|
||||
class RoborockCoordinatedEntityB01(
|
||||
RoborockEntity, CoordinatorEntity[RoborockDataUpdateCoordinatorB01]
|
||||
):
|
||||
"""Representation of coordinated Roborock Entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_id: str,
|
||||
coordinator: RoborockDataUpdateCoordinatorB01,
|
||||
) -> None:
|
||||
"""Initialize the coordinated Roborock Device."""
|
||||
RoborockEntity.__init__(
|
||||
self,
|
||||
unique_id=unique_id,
|
||||
device_info=coordinator.device_info,
|
||||
)
|
||||
CoordinatorEntity.__init__(self, coordinator=coordinator)
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
@@ -8,14 +8,12 @@ import datetime
|
||||
import logging
|
||||
|
||||
from roborock.data import (
|
||||
B01Props,
|
||||
DyadError,
|
||||
RoborockDockErrorCode,
|
||||
RoborockDockTypeCode,
|
||||
RoborockDyadStateCode,
|
||||
RoborockErrorCode,
|
||||
RoborockStateCode,
|
||||
WorkStatusMapping,
|
||||
ZeoError,
|
||||
ZeoState,
|
||||
)
|
||||
@@ -36,11 +34,9 @@ from .coordinator import (
|
||||
RoborockConfigEntry,
|
||||
RoborockDataUpdateCoordinator,
|
||||
RoborockDataUpdateCoordinatorA01,
|
||||
RoborockDataUpdateCoordinatorB01,
|
||||
)
|
||||
from .entity import (
|
||||
RoborockCoordinatedEntityA01,
|
||||
RoborockCoordinatedEntityB01,
|
||||
RoborockCoordinatedEntityV1,
|
||||
RoborockEntity,
|
||||
)
|
||||
@@ -68,13 +64,6 @@ class RoborockSensorDescriptionA01(SensorEntityDescription):
|
||||
data_protocol: RoborockDyadDataProtocol | RoborockZeoProtocol
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class RoborockSensorDescriptionB01(SensorEntityDescription):
|
||||
"""A class that describes Roborock B01 sensors."""
|
||||
|
||||
value_fn: Callable[[B01Props], StateType]
|
||||
|
||||
|
||||
def _dock_error_value_fn(state: DeviceState) -> str | None:
|
||||
if (
|
||||
status := state.status.dock_error_status
|
||||
@@ -337,71 +326,6 @@ A01_SENSOR_DESCRIPTIONS: list[RoborockSensorDescriptionA01] = [
|
||||
),
|
||||
]
|
||||
|
||||
Q7_B01_SENSOR_DESCRIPTIONS = [
|
||||
RoborockSensorDescriptionB01(
|
||||
key="q7_status",
|
||||
value_fn=lambda data: data.status_name,
|
||||
translation_key="q7_status",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=WorkStatusMapping.keys(),
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="main_brush_time_left",
|
||||
value_fn=lambda data: data.main_brush_time_left,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="main_brush_time_left",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="side_brush_time_left",
|
||||
value_fn=lambda data: data.side_brush_time_left,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="side_brush_time_left",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="filter_time_left",
|
||||
value_fn=lambda data: data.filter_time_left,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="filter_time_left",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="sensor_time_left",
|
||||
value_fn=lambda data: data.sensor_dirty_time_left,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="sensor_time_left",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="mop_life_time_left",
|
||||
value_fn=lambda data: data.mop_life_time_left,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="mop_life_time_left",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescriptionB01(
|
||||
key="total_cleaning_time",
|
||||
value_fn=lambda data: data.real_clean_time,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
translation_key="total_cleaning_time",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
hass: HomeAssistant,
@@ -430,12 +354,6 @@ async def async_setup_entry(
for description in A01_SENSOR_DESCRIPTIONS
if description.data_protocol in coordinator.request_protocols
)
entities.extend(
RoborockSensorEntityB01(coordinator, description)
for coordinator in coordinators.b01
for description in Q7_B01_SENSOR_DESCRIPTIONS
if description.value_fn(coordinator.data) is not None
)
async_add_entities(entities)


@@ -522,23 +440,3 @@ class RoborockSensorEntityA01(RoborockCoordinatedEntityA01, SensorEntity):
def native_value(self) -> StateType:
"""Return the value reported by the sensor."""
return self.coordinator.data[self.entity_description.data_protocol]


class RoborockSensorEntityB01(RoborockCoordinatedEntityB01, SensorEntity):
"""Representation of a B01 Roborock sensor."""

entity_description: RoborockSensorDescriptionB01

def __init__(
self,
coordinator: RoborockDataUpdateCoordinatorB01,
description: RoborockSensorDescriptionB01,
) -> None:
"""Initialize the entity."""
self.entity_description = description
super().__init__(f"{description.key}_{coordinator.duid_slug}", coordinator)

@property
def native_value(self) -> StateType:
"""Return the value reported by the sensor."""
return self.entity_description.value_fn(self.coordinator.data)

@@ -213,25 +213,6 @@
|
||||
"mop_drying_remaining_time": {
|
||||
"name": "Mop drying remaining time"
|
||||
},
|
||||
"mop_life_time_left": {
|
||||
"name": "Mop life time left"
|
||||
},
|
||||
"q7_status": {
|
||||
"name": "Status",
|
||||
"state": {
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"docking": "[%key:component::roborock::entity::sensor::status::state::docking%]",
|
||||
"mop_airdrying": "Mop air drying",
|
||||
"mop_cleaning": "Mop cleaning",
|
||||
"moping": "Mopping",
|
||||
"paused": "[%key:common::state::paused%]",
|
||||
"sleeping": "Sleeping",
|
||||
"sweep_moping": "Sweep mopping",
|
||||
"sweep_moping_2": "Sweep mopping",
|
||||
"updating": "[%key:component::roborock::entity::sensor::status::state::updating%]",
|
||||
"waiting_for_orders": "Waiting for orders"
|
||||
}
|
||||
},
|
||||
"sensor_time_left": {
|
||||
"name": "Sensor time left"
|
||||
},
|
||||
|
||||
@@ -537,12 +537,6 @@
|
||||
"voltmeter_value": {
|
||||
"name": "Voltmeter value"
|
||||
},
|
||||
"voltmeter_value_with_channel_name": {
|
||||
"name": "Voltmeter value {channel_name}"
|
||||
},
|
||||
"voltmeter_with_channel_name": {
|
||||
"name": "Voltmeter {channel_name}"
|
||||
},
|
||||
"water_consumption": {
|
||||
"name": "Water consumption"
|
||||
},
|
||||
|
||||
@@ -68,7 +68,7 @@ class _SwingModeWrapper(DeviceWrapper):
|
||||
on_off: DPCodeBooleanWrapper | None = None
|
||||
horizontal: DPCodeBooleanWrapper | None = None
|
||||
vertical: DPCodeBooleanWrapper | None = None
|
||||
options: list[str]
|
||||
modes: list[str]
|
||||
|
||||
@classmethod
|
||||
def find_dpcode(cls, device: CustomerDevice) -> Self | None:
|
||||
@@ -83,18 +83,18 @@ class _SwingModeWrapper(DeviceWrapper):
|
||||
device, DPCode.SWITCH_VERTICAL, prefer_function=True
|
||||
)
|
||||
if on_off or horizontal or vertical:
|
||||
options = [SWING_OFF]
|
||||
modes = [SWING_OFF]
|
||||
if on_off:
|
||||
options.append(SWING_ON)
|
||||
modes.append(SWING_ON)
|
||||
if horizontal:
|
||||
options.append(SWING_HORIZONTAL)
|
||||
modes.append(SWING_HORIZONTAL)
|
||||
if vertical:
|
||||
options.append(SWING_VERTICAL)
|
||||
modes.append(SWING_VERTICAL)
|
||||
return cls(
|
||||
on_off=on_off,
|
||||
horizontal=horizontal,
|
||||
vertical=vertical,
|
||||
options=options,
|
||||
modes=modes,
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -403,7 +403,7 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
|
||||
# Determine swing modes
|
||||
if swing_wrapper:
|
||||
self._attr_supported_features |= ClimateEntityFeature.SWING_MODE
|
||||
self._attr_swing_modes = swing_wrapper.options
|
||||
self._attr_swing_modes = swing_wrapper.modes
|
||||
|
||||
if switch_wrapper:
|
||||
self._attr_supported_features |= (
|
||||
|
||||
@@ -31,12 +31,10 @@ from .models import (
|
||||
class _DPCodeEventWrapper(DPCodeTypeInformationWrapper):
|
||||
"""Base class for Tuya event wrappers."""
|
||||
|
||||
options: list[str]
|
||||
|
||||
def __init__(self, dpcode: str, type_information: Any) -> None:
|
||||
"""Init _DPCodeEventWrapper."""
|
||||
super().__init__(dpcode, type_information)
|
||||
self.options = ["triggered"]
|
||||
@property
|
||||
def event_types(self) -> list[str]:
|
||||
"""Return the event types for the DP code."""
|
||||
return ["triggered"]
|
||||
|
||||
def get_event_type(
|
||||
self, device: CustomerDevice, updated_status_properties: list[str] | None
|
||||
@@ -57,6 +55,11 @@ class _DPCodeEventWrapper(DPCodeTypeInformationWrapper):
|
||||
class _EventEnumWrapper(DPCodeEnumWrapper, _DPCodeEventWrapper):
|
||||
"""Wrapper for event enum DP codes."""
|
||||
|
||||
@property
|
||||
def event_types(self) -> list[str]:
|
||||
"""Return the event types for the enum."""
|
||||
return self.options
|
||||
|
||||
def get_event_type(
|
||||
self, device: CustomerDevice, updated_status_properties: list[str] | None
|
||||
) -> str | None:
|
||||
@@ -229,7 +232,7 @@ class TuyaEventEntity(TuyaEntity, EventEntity):
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{super().unique_id}{description.key}"
|
||||
self._dpcode_wrapper = dpcode_wrapper
|
||||
self._attr_event_types = dpcode_wrapper.options
|
||||
self._attr_event_types = dpcode_wrapper.event_types
|
||||
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
|
||||
@@ -128,9 +128,9 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
self._switch_wrapper = switch_wrapper

self._attr_fan_speed_list = []
self._attr_supported_features = VacuumEntityFeature.SEND_COMMAND
if status_wrapper or pause_wrapper:
self._attr_supported_features |= VacuumEntityFeature.STATE
self._attr_supported_features = (
VacuumEntityFeature.SEND_COMMAND | VacuumEntityFeature.STATE
)
if pause_wrapper:
self._attr_supported_features |= VacuumEntityFeature.PAUSE


@@ -453,7 +453,6 @@ EVENT_SENSORS: tuple[ProtectBinaryEventEntityDescription, ...] = (
ProtectBinaryEventEntityDescription(
key="smart_audio_cmonx",
translation_key="co_alarm_detected",
device_class=BinarySensorDeviceClass.CO,
ufp_required_field="can_detect_co",
ufp_enabled="is_co_detection_on",
ufp_event_obj="last_cmonx_detect_event",

@@ -6,7 +6,7 @@ from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Final
|
||||
|
||||
from uiprotect.data import ModelType, ProtectAdoptableDeviceModel
|
||||
|
||||
@@ -45,6 +45,9 @@ class ProtectButtonEntityDescription(
|
||||
ufp_press: str | None = None
|
||||
|
||||
|
||||
DEVICE_CLASS_CHIME_BUTTON: Final = "unifiprotect__chime_button"
|
||||
|
||||
|
||||
ALL_DEVICE_BUTTONS: tuple[ProtectButtonEntityDescription, ...] = (
|
||||
ProtectButtonEntityDescription(
|
||||
key="reboot",
|
||||
@@ -81,6 +84,7 @@ CHIME_BUTTONS: tuple[ProtectButtonEntityDescription, ...] = (
|
||||
ProtectButtonEntityDescription(
|
||||
key="play",
|
||||
translation_key="play_chime",
|
||||
device_class=DEVICE_CLASS_CHIME_BUTTON,
|
||||
ufp_press="play",
|
||||
),
|
||||
ProtectButtonEntityDescription(
|
||||
|
||||
@@ -83,7 +83,7 @@ def _async_device_entities(
|
||||
_LOGGER.debug(
|
||||
"Adding %s entity %s for %s",
|
||||
klass.__name__,
|
||||
description.key,
|
||||
description.name,
|
||||
device.display_name,
|
||||
)
|
||||
continue
|
||||
@@ -111,7 +111,7 @@ def _async_device_entities(
|
||||
_LOGGER.debug(
|
||||
"Adding %s entity %s for %s",
|
||||
klass.__name__,
|
||||
description.key,
|
||||
description.name,
|
||||
device.display_name,
|
||||
)
|
||||
|
||||
@@ -252,11 +252,16 @@ class BaseProtectEntity(Entity):
|
||||
|
||||
if changed:
|
||||
if _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
device_name = device.name or ""
|
||||
if hasattr(self, "entity_description") and self.entity_description.name:
|
||||
device_name += f" {self.entity_description.name}"
|
||||
|
||||
_LOGGER.debug(
|
||||
"Updating state [%s] %s -> %s",
|
||||
self.entity_id,
|
||||
"Updating state [%s (%s)] %s -> %s",
|
||||
device_name,
|
||||
device.mac,
|
||||
previous_attrs,
|
||||
tuple(getter() for getter in self._state_getters),
|
||||
tuple((getattr(self, attr)) for attr in self._state_attrs),
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -377,7 +377,9 @@ class ProtectSelects(ProtectDeviceEntity, SelectEntity):
|
||||
entity_description.entity_category is not None
|
||||
and entity_description.ufp_options_fn is not None
|
||||
):
|
||||
_LOGGER.debug("Updating dynamic select options for %s", self.entity_id)
|
||||
_LOGGER.debug(
|
||||
"Updating dynamic select options for %s", entity_description.name
|
||||
)
|
||||
self._async_set_options(self.data, entity_description)
|
||||
if (unifi_value := entity_description.get_ufp_value(device)) is None:
|
||||
unifi_value = TYPE_EMPTY_VALUE
|
||||
|
||||
@@ -29,6 +29,8 @@ set_chime_paired_doorbells:
|
||||
selector:
|
||||
device:
|
||||
integration: unifiprotect
|
||||
entity:
|
||||
device_class: unifiprotect__chime_button
|
||||
doorbells:
|
||||
example: "binary_sensor.front_doorbell_doorbell"
|
||||
required: false
|
||||
|
||||
@@ -19,7 +19,6 @@ from homeassistant.components.webhook import (
)
from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import AUTO_SHUT_OFF_EVENT_NAME, DOMAIN
@@ -51,12 +50,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: WatergateConfigEntry) ->
)

watergate_client = WatergateLocalApiClient(
base_url=(
sonic_address
if sonic_address.startswith("http")
else f"http://{sonic_address}"
),
session=async_get_clientsession(hass),
sonic_address if sonic_address.startswith("http") else f"http://{sonic_address}"
)

coordinator = WatergateDataCoordinator(hass, entry, watergate_client)

@@ -11,7 +11,6 @@ from watergate_local_api.watergate_api import (
from homeassistant.components.webhook import async_generate_id as webhook_generate_id
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

@@ -35,8 +34,7 @@ class WatergateConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] = {}
if user_input is not None:
watergate_client = WatergateLocalApiClient(
base_url=self.prepare_ip_address(user_input[CONF_IP_ADDRESS]),
session=async_get_clientsession(self.hass),
self.prepare_ip_address(user_input[CONF_IP_ADDRESS])
)
try:
state = await watergate_client.async_get_device_state()

@@ -39,8 +39,3 @@ rules:
integration-owner: done
docs-installation-parameters: todo
docs-configuration-parameters: todo

# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

@@ -3344,7 +3344,7 @@
|
||||
},
|
||||
"kmtronic": {
|
||||
"name": "KMtronic",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -3363,7 +3363,7 @@
|
||||
},
|
||||
"kodi": {
|
||||
"name": "Kodi",
|
||||
"integration_type": "service",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -3386,13 +3386,13 @@
|
||||
},
|
||||
"kostal_plenticore": {
|
||||
"name": "Kostal Plenticore Solar Inverter",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"kraken": {
|
||||
"name": "Kraken",
|
||||
"integration_type": "service",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -3403,7 +3403,7 @@
|
||||
},
|
||||
"kulersky": {
|
||||
"name": "Kuler Sky",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -3439,7 +3439,7 @@
|
||||
},
|
||||
"landisgyr_heat_meter": {
|
||||
"name": "Landis+Gyr Heat Meter",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -3451,7 +3451,7 @@
|
||||
},
|
||||
"lastfm": {
|
||||
"name": "Last.fm",
|
||||
"integration_type": "service",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -3482,13 +3482,13 @@
|
||||
},
|
||||
"leaone": {
|
||||
"name": "LeaOne",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"led_ble": {
|
||||
"name": "LED BLE",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -3536,7 +3536,7 @@
|
||||
"name": "LG Netcast"
|
||||
},
|
||||
"lg_soundbar": {
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "LG Soundbars"
|
||||
@@ -3569,7 +3569,7 @@
|
||||
},
|
||||
"lifx": {
|
||||
"name": "LIFX",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -3727,7 +3727,7 @@
|
||||
},
|
||||
"loqed": {
|
||||
"name": "LOQED Touch Smart Lock",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -3796,7 +3796,7 @@
|
||||
},
|
||||
"mailgun": {
|
||||
"name": "Mailgun",
|
||||
"integration_type": "service",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_push"
|
||||
},
|
||||
@@ -3866,7 +3866,7 @@
|
||||
},
|
||||
"medcom_ble": {
|
||||
"name": "Medcom Bluetooth",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -3899,7 +3899,7 @@
|
||||
"name": "Melnor",
|
||||
"integrations": {
|
||||
"melnor": {
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Melnor Bluetooth"
|
||||
|
||||
9
requirements_all.txt
generated
@@ -252,7 +252,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==43.3.0
aioesphomeapi==43.0.0

# homeassistant.components.matrix
# homeassistant.components.slack
@@ -1918,6 +1918,9 @@ pybalboa==1.1.3
# homeassistant.components.bbox
pybbox==0.0.5-alpha

# homeassistant.components.blackbird
pyblackbird==0.6

# homeassistant.components.bluesound
pyblu==2.0.5

@@ -2297,7 +2300,7 @@ pypaperless==4.1.1
pypca==0.0.7

# homeassistant.components.lcn
pypck==0.9.8
pypck==0.9.7

# homeassistant.components.pglab
pypglab==0.0.5
@@ -2511,7 +2514,7 @@ python-google-weather-api==0.0.4
python-homeassistant-analytics==0.9.0

# homeassistant.components.homewizard
python-homewizard-energy==10.0.0
python-homewizard-energy==9.3.0

# homeassistant.components.hp_ilo
python-hpilo==4.4.3

9
requirements_test_all.txt
generated
@@ -243,7 +243,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==43.3.0
aioesphomeapi==43.0.0

# homeassistant.components.matrix
# homeassistant.components.slack
@@ -1637,6 +1637,9 @@ pyaussiebb==0.1.5
# homeassistant.components.balboa
pybalboa==1.1.3

# homeassistant.components.blackbird
pyblackbird==0.6

# homeassistant.components.bluesound
pyblu==2.0.5

@@ -1938,7 +1941,7 @@ pypalazzetti==0.1.20
pypaperless==4.1.1

# homeassistant.components.lcn
pypck==0.9.8
pypck==0.9.7

# homeassistant.components.pglab
pypglab==0.0.5
@@ -2104,7 +2107,7 @@ python-google-weather-api==0.0.4
python-homeassistant-analytics==0.9.0

# homeassistant.components.homewizard
python-homewizard-energy==10.0.0
python-homewizard-energy==9.3.0

# homeassistant.components.izone
python-izone==1.2.9

@@ -12,7 +12,7 @@ from .model import Config, Integration
from .requirements import PACKAGE_REGEX, PIP_VERSION_RANGE_SEPARATOR

_GO2RTC_SHA = (
"f394f6329f5389a4c9a7fc54b09fdec9621bbb78bf7a672b973440bbdfb02241" # 1.9.13
"baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640" # 1.9.12
)

DOCKERFILE_TEMPLATE = r"""# Automatically generated by hassfest.

@@ -1,3 +0,0 @@
"""Fixtures for component."""

collect_ignore_glob = ["test_*.py"]
@@ -25,7 +25,6 @@ async def setup_config_entry(
device: Mock | None = None,
fritz: Mock | None = None,
template: Mock | None = None,
trigger: Mock | None = None,
) -> MockConfigEntry:
"""Do setup of a MockConfigEntry."""
entry = MockConfigEntry(
@@ -40,9 +39,6 @@ async def setup_config_entry(
if template is not None and fritz is not None:
fritz().get_templates.return_value = [template]

if trigger is not None and fritz is not None:
fritz().get_triggers.return_value = [trigger]

await hass.config_entries.async_setup(entry.entry_id)
if device is not None:
await hass.async_block_till_done()
@@ -50,10 +46,7 @@ async def setup_config_entry(
|
||||
|
||||
|
||||
def set_devices(
|
||||
fritz: Mock,
|
||||
devices: list[Mock] | None = None,
|
||||
templates: list[Mock] | None = None,
|
||||
triggers: list[Mock] | None = None,
|
||||
fritz: Mock, devices: list[Mock] | None = None, templates: list[Mock] | None = None
|
||||
) -> None:
|
||||
"""Set list of devices or templates."""
|
||||
if devices is not None:
|
||||
@@ -62,9 +55,6 @@ def set_devices(
|
||||
if templates is not None:
|
||||
fritz().get_templates.return_value = templates
|
||||
|
||||
if triggers is not None:
|
||||
fritz().get_triggers.return_value = triggers
|
||||
|
||||
|
||||
class FritzEntityBaseMock(Mock):
|
||||
"""base mock of a AVM Fritz!Box binary sensor device."""
|
||||
@@ -209,11 +199,3 @@ class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock):
|
||||
"""Mock of a AVM Fritz!Box cover device with unknown position."""
|
||||
|
||||
levelpercentage = None
|
||||
|
||||
|
||||
class FritzTriggerMock(FritzEntityBaseMock):
|
||||
"""Mock of a AVM Fritz!Box smarthome trigger."""
|
||||
|
||||
active = True
|
||||
ain = "trg1234 56789"
|
||||
name = "fake_trigger"
|
||||
|
||||
@@ -47,51 +47,3 @@
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[switch.fake_trigger-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.fake_trigger',
|
||||
'has_entity_name': False,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'fake_trigger',
|
||||
'platform': 'fritzbox',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': 'trg1234 56789',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[switch.fake_trigger-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'fake_trigger',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.fake_trigger',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -23,13 +23,12 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import FritzDeviceSwitchMock, FritzTriggerMock, set_devices, setup_config_entry
|
||||
from . import FritzDeviceSwitchMock, set_devices, setup_config_entry
|
||||
from .const import CONF_FAKE_NAME, MOCK_CONFIG
|
||||
|
||||
from tests.common import async_fire_time_changed, snapshot_platform
|
||||
|
||||
SWITCH_ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}"
|
||||
TRIGGER_ENTITY_ID = f"{SWITCH_DOMAIN}.fake_trigger"
|
||||
ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}"
|
||||
|
||||
|
||||
async def test_setup(
|
||||
@@ -40,56 +39,50 @@ async def test_setup(
|
||||
) -> None:
|
||||
"""Test setup of platform."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
trigger = FritzTriggerMock()
|
||||
|
||||
with patch("homeassistant.components.fritzbox.PLATFORMS", [Platform.SWITCH]):
|
||||
entry = await setup_config_entry(
|
||||
hass,
|
||||
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
device=device,
|
||||
fritz=fritz,
|
||||
trigger=trigger,
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
|
||||
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
|
||||
|
||||
|
||||
async def test_switch_turn_on(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test turn switch device on."""
|
||||
async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test turn device on."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
await setup_config_entry(
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
|
||||
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
|
||||
)
|
||||
assert device.set_switch_state_on.call_count == 1
|
||||
|
||||
|
||||
async def test_switch_turn_off(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test turn switch device off."""
|
||||
async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test turn device off."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
|
||||
await setup_config_entry(
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
|
||||
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
|
||||
)
|
||||
|
||||
assert device.set_switch_state_off.call_count == 1
|
||||
|
||||
|
||||
async def test_switch_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test toggling while switch device is locked."""
|
||||
async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test toggling while device is locked."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
device.lock = True
|
||||
|
||||
await setup_config_entry(
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
|
||||
with pytest.raises(
|
||||
@@ -97,7 +90,7 @@ async def test_switch_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> N
|
||||
match="Can't toggle switch while manual switching is disabled for the device",
|
||||
):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
|
||||
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
|
||||
)
|
||||
|
||||
with pytest.raises(
|
||||
@@ -105,23 +98,17 @@ async def test_switch_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> N
|
||||
match="Can't toggle switch while manual switching is disabled for the device",
|
||||
):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
|
||||
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
|
||||
)
|
||||
|
||||
|
||||
async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test update without error."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
trigger = FritzTriggerMock()
|
||||
await setup_config_entry(
|
||||
hass,
|
||||
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
device=device,
|
||||
fritz=fritz,
|
||||
trigger=trigger,
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
assert fritz().update_devices.call_count == 1
|
||||
assert fritz().update_triggers.call_count == 1
|
||||
assert fritz().login.call_count == 1
|
||||
|
||||
next_update = dt_util.utcnow() + timedelta(seconds=200)
|
||||
@@ -129,7 +116,6 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert fritz().update_devices.call_count == 2
|
||||
assert fritz().update_triggers.call_count == 2
|
||||
assert fritz().login.call_count == 1
|
||||
|
||||
|
||||
@@ -138,7 +124,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
device = FritzDeviceSwitchMock()
|
||||
fritz().update_devices.side_effect = HTTPError("Boom")
|
||||
entry = await setup_config_entry(
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
assert entry.state is ConfigEntryState.SETUP_RETRY
|
||||
assert fritz().update_devices.call_count == 2
|
||||
@@ -159,10 +145,10 @@ async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> No
|
||||
device.energy = 0
|
||||
device.power = 0
|
||||
await setup_config_entry(
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
|
||||
state = hass.states.get(SWITCH_ENTITY_ID)
|
||||
state = hass.states.get(ENTITY_ID)
|
||||
assert state
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -170,19 +156,13 @@ async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> No
|
||||
async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test adding new discovered devices during runtime."""
|
||||
device = FritzDeviceSwitchMock()
|
||||
trigger = FritzTriggerMock()
|
||||
await setup_config_entry(
|
||||
hass,
|
||||
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
device=device,
|
||||
fritz=fritz,
|
||||
trigger=trigger,
|
||||
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
|
||||
)
|
||||
|
||||
assert hass.states.get(SWITCH_ENTITY_ID)
|
||||
assert hass.states.get(TRIGGER_ENTITY_ID)
|
||||
state = hass.states.get(ENTITY_ID)
|
||||
assert state
|
||||
|
||||
# add new switch device
|
||||
new_device = FritzDeviceSwitchMock()
|
||||
new_device.ain = "7890 1234"
|
||||
new_device.name = "new_switch"
|
||||
@@ -192,48 +172,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
async_fire_time_changed(hass, next_update)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert hass.states.get(f"{SWITCH_DOMAIN}.new_switch")
|
||||
|
||||
# add new trigger
|
||||
new_trigger = FritzTriggerMock()
|
||||
new_trigger.ain = "trg7890 1234"
|
||||
new_trigger.name = "new_trigger"
|
||||
set_devices(fritz, triggers=[trigger, new_trigger])
|
||||
|
||||
next_update = dt_util.utcnow() + timedelta(seconds=200)
|
||||
async_fire_time_changed(hass, next_update)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert hass.states.get(f"{SWITCH_DOMAIN}.new_trigger")
|
||||
|
||||
|
||||
async def test_activate_trigger(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test activating a FRITZ! trigger."""
|
||||
trigger = FritzTriggerMock()
|
||||
await setup_config_entry(
|
||||
hass,
|
||||
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
fritz=fritz,
|
||||
trigger=trigger,
|
||||
)
|
||||
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: TRIGGER_ENTITY_ID}, True
|
||||
)
|
||||
assert fritz().set_trigger_active.call_count == 1
|
||||
|
||||
|
||||
async def test_deactivate_trigger(hass: HomeAssistant, fritz: Mock) -> None:
|
||||
"""Test deactivating a FRITZ! trigger."""
|
||||
trigger = FritzTriggerMock()
|
||||
await setup_config_entry(
|
||||
hass,
|
||||
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
fritz=fritz,
|
||||
trigger=trigger,
|
||||
)
|
||||
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: TRIGGER_ENTITY_ID}, True
|
||||
)
|
||||
assert fritz().set_trigger_inactive.call_count == 1
|
||||
state = hass.states.get(f"{SWITCH_DOMAIN}.new_switch")
|
||||
assert state
|
||||
|
||||
@@ -129,15 +129,13 @@ def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry:
|
||||
"stream_source": "http://janebloggs:letmein2@example.com/stream",
|
||||
"username": "johnbloggs",
|
||||
"password": "letmein123",
|
||||
"content_type": "image/jpeg",
|
||||
"advanced": {
|
||||
"framerate": 2.0,
|
||||
"verify_ssl": True,
|
||||
"limit_refetch_to_url_change": False,
|
||||
"authentication": "basic",
|
||||
"framerate": 2.0,
|
||||
"verify_ssl": True,
|
||||
"content_type": "image/jpeg",
|
||||
},
|
||||
},
|
||||
version=2,
|
||||
version=1,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
return entry
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
import contextlib
|
||||
from copy import deepcopy
|
||||
import errno
|
||||
from http import HTTPStatus
|
||||
import os.path
|
||||
@@ -25,7 +24,6 @@ from homeassistant.components.generic.const import (
|
||||
CONF_STILL_IMAGE_URL,
|
||||
CONF_STREAM_SOURCE,
|
||||
DOMAIN,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
from homeassistant.components.stream import (
|
||||
CONF_RTSP_TRANSPORT,
|
||||
@@ -50,13 +48,11 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator
|
||||
TESTDATA = {
|
||||
CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1",
|
||||
CONF_STREAM_SOURCE: "http://127.0.0.1/testurl/2",
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
TESTDATA_ONLYSTILL = TESTDATA.copy()
|
||||
@@ -65,6 +61,11 @@ TESTDATA_ONLYSTILL.pop(CONF_STREAM_SOURCE)
|
||||
TESTDATA_ONLYSTREAM = TESTDATA.copy()
|
||||
TESTDATA_ONLYSTREAM.pop(CONF_STILL_IMAGE_URL)
|
||||
|
||||
TESTDATA_OPTIONS = {
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
**TESTDATA,
|
||||
}
|
||||
|
||||
|
||||
@respx.mock
|
||||
@pytest.mark.usefixtures("fakeimg_png")
|
||||
@@ -113,14 +114,12 @@ async def test_form(
|
||||
assert result2["options"] == {
|
||||
CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1",
|
||||
CONF_STREAM_SOURCE: "http://127.0.0.1/testurl/2",
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
CONF_CONTENT_TYPE: "image/png",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5.0,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
# Check that the preview image is disabled after.
|
||||
@@ -151,14 +150,12 @@ async def test_form_only_stillimage(
|
||||
assert result2["title"] == "127_0_0_1"
|
||||
assert result2["options"] == {
|
||||
CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1",
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
CONF_CONTENT_TYPE: "image/png",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5.0,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
assert respx.calls.call_count == 1
|
||||
@@ -379,8 +376,8 @@ async def test_form_rtsp_mode(
|
||||
mock_setup_entry: _patch[MagicMock],
|
||||
) -> None:
|
||||
"""Test we complete ok if the user enters a stream url."""
|
||||
data = deepcopy(TESTDATA)
|
||||
data[SECTION_ADVANCED][CONF_RTSP_TRANSPORT] = "tcp"
|
||||
data = TESTDATA.copy()
|
||||
data[CONF_RTSP_TRANSPORT] = "tcp"
|
||||
data[CONF_STREAM_SOURCE] = "rtsp://127.0.0.1/testurl/2"
|
||||
result1 = await hass.config_entries.flow.async_configure(user_flow["flow_id"], data)
|
||||
assert result1["type"] is FlowResultType.FORM
|
||||
@@ -393,16 +390,14 @@ async def test_form_rtsp_mode(
|
||||
assert result2["title"] == "127_0_0_1"
|
||||
assert result2["options"] == {
|
||||
CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1",
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_STREAM_SOURCE: "rtsp://127.0.0.1/testurl/2",
|
||||
CONF_RTSP_TRANSPORT: "tcp",
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
CONF_CONTENT_TYPE: "image/png",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5.0,
|
||||
CONF_VERIFY_SSL: False,
|
||||
CONF_RTSP_TRANSPORT: "tcp",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -428,15 +423,13 @@ async def test_form_only_stream(
|
||||
|
||||
assert result2["title"] == "127_0_0_1"
|
||||
assert result2["options"] == {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_STREAM_SOURCE: "rtsp://user:pass@127.0.0.1/testurl/2",
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
CONF_CONTENT_TYPE: "image/jpeg",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5.0,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
with patch(
|
||||
@@ -454,12 +447,10 @@ async def test_form_still_and_stream_not_provided(
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
user_flow["flow_id"],
|
||||
{
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 5,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
},
|
||||
)
|
||||
assert result2["type"] is FlowResultType.FORM
|
||||
assert result2["errors"] == {"base": "no_still_image_or_stream_url"}
|
||||
@@ -896,17 +887,8 @@ async def test_migrate_existing_ids(
|
||||
hass: HomeAssistant, entity_registry: er.EntityRegistry
|
||||
) -> None:
|
||||
"""Test that existing ids are migrated for issue #70568."""
|
||||
test_data = {
|
||||
CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1",
|
||||
CONF_STREAM_SOURCE: "http://127.0.0.1/testurl/2",
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_USERNAME: "fred_flintstone",
|
||||
CONF_PASSWORD: "bambam",
|
||||
CONF_FRAMERATE: 5,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_VERIFY_SSL: False,
|
||||
}
|
||||
|
||||
test_data = TESTDATA_OPTIONS.copy()
|
||||
test_data[CONF_CONTENT_TYPE] = "image/png"
|
||||
old_unique_id = "54321"
|
||||
entity_id = "camera.sample_camera"
|
||||
@@ -952,12 +934,9 @@ async def test_options_use_wallclock_as_timestamps(
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "init"
|
||||
|
||||
data = deepcopy(TESTDATA)
|
||||
data[SECTION_ADVANCED][CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True
|
||||
result2 = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input=data,
|
||||
user_input={CONF_USE_WALLCLOCK_AS_TIMESTAMPS: True, **TESTDATA},
|
||||
)
|
||||
assert result2["type"] is FlowResultType.FORM
|
||||
|
||||
@@ -987,7 +966,7 @@ async def test_options_use_wallclock_as_timestamps(
|
||||
assert result3["step_id"] == "init"
|
||||
result4 = await hass.config_entries.options.async_configure(
|
||||
result3["flow_id"],
|
||||
user_input=data,
|
||||
user_input={CONF_USE_WALLCLOCK_AS_TIMESTAMPS: True, **TESTDATA},
|
||||
)
|
||||
assert result4["type"] is FlowResultType.FORM
|
||||
assert result4["step_id"] == "user_confirm"
|
||||
|
||||
@@ -26,13 +26,11 @@ async def test_entry_diagnostics(
|
||||
"stream_source": "http://****:****@example.com/****",
|
||||
"username": REDACTED,
|
||||
"password": REDACTED,
|
||||
"content_type": "image/jpeg",
|
||||
"advanced": {
|
||||
"limit_refetch_to_url_change": False,
|
||||
"authentication": "basic",
|
||||
"framerate": 2.0,
|
||||
"verify_ssl": True,
|
||||
},
|
||||
"content_type": "image/jpeg",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -2,23 +2,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.generic.const import (
|
||||
CONF_CONTENT_TYPE,
|
||||
CONF_FRAMERATE,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
|
||||
CONF_STILL_IMAGE_URL,
|
||||
CONF_STREAM_SOURCE,
|
||||
DOMAIN,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import (
|
||||
CONF_AUTHENTICATION,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
HTTP_BASIC_AUTHENTICATION,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
@@ -51,44 +35,3 @@ async def test_reload_on_title_change(
|
||||
assert (
|
||||
hass.states.get("camera.test_camera").attributes["friendly_name"] == "New Title"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("fakeimg_png")
|
||||
async def test_migration_to_version_2(hass: HomeAssistant) -> None:
|
||||
"""Test the File sensor with JSON entries."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
title="Test Camera",
|
||||
unique_id="abc123",
|
||||
data={},
|
||||
options={
|
||||
CONF_STILL_IMAGE_URL: "http://joebloggs:letmein1@example.com/secret1/file.jpg?pw=qwerty",
|
||||
CONF_STREAM_SOURCE: "http://janebloggs:letmein2@example.com/stream",
|
||||
CONF_USERNAME: "johnbloggs",
|
||||
CONF_PASSWORD: "letmein123",
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_FRAMERATE: 2.0,
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_CONTENT_TYPE: "image/jpeg",
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
assert entry.version == 2
|
||||
assert entry.options == {
|
||||
CONF_STILL_IMAGE_URL: "http://joebloggs:letmein1@example.com/secret1/file.jpg?pw=qwerty",
|
||||
CONF_STREAM_SOURCE: "http://janebloggs:letmein2@example.com/stream",
|
||||
CONF_USERNAME: "johnbloggs",
|
||||
CONF_PASSWORD: "letmein123",
|
||||
CONF_CONTENT_TYPE: "image/jpeg",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_FRAMERATE: 2.0,
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
{
"mode": "zero",
"permissions": ["charge_allowed", "discharge_allowed"],
"battery_count": 2,
"power_w": -404,
"target_power_w": -400,
"max_consumption_w": 1600,

@@ -279,14 +279,9 @@
|
||||
dict({
|
||||
'data': dict({
|
||||
'batteries': dict({
|
||||
'battery_count': 2,
|
||||
'max_consumption_w': 1600.0,
|
||||
'max_production_w': 800.0,
|
||||
'mode': 'zero',
|
||||
'permissions': list([
|
||||
'charge_allowed',
|
||||
'discharge_allowed',
|
||||
]),
|
||||
'power_w': -404.0,
|
||||
'target_power_w': -400.0,
|
||||
}),
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Device Battery group mode',
|
||||
'options': list([
|
||||
'standby',
|
||||
'to_full',
|
||||
'zero',
|
||||
<Mode.ZERO: 'zero'>,
|
||||
<Mode.STANDBY: 'standby'>,
|
||||
<Mode.TO_FULL: 'to_full'>,
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -24,9 +24,9 @@
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'standby',
|
||||
'to_full',
|
||||
'zero',
|
||||
<Mode.ZERO: 'zero'>,
|
||||
<Mode.STANDBY: 'standby'>,
|
||||
<Mode.TO_FULL: 'to_full'>,
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
|
||||
228
tests/components/input_boolean/test_trigger.py
Normal file
@@ -0,0 +1,228 @@
|
||||
"""Test input boolean triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.input_boolean import DOMAIN
|
||||
from homeassistant.const import ATTR_LABEL_ID, CONF_ENTITY_ID, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
|
||||
from tests.components import (
|
||||
StateDescription,
|
||||
arm_trigger,
|
||||
parametrize_target_entities,
|
||||
parametrize_trigger_states,
|
||||
set_or_remove_state,
|
||||
target_entities,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, name="stub_blueprint_populate")
|
||||
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
|
||||
"""Stub copying the blueprints to the config folder."""
|
||||
|
||||
|
||||
@pytest.fixture(name="enable_experimental_triggers_conditions")
|
||||
def enable_experimental_triggers_conditions() -> Generator[None]:
|
||||
"""Enable experimental triggers and conditions."""
|
||||
with patch(
|
||||
"homeassistant.components.labs.async_is_preview_feature_enabled",
|
||||
return_value=True,
|
||||
):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def target_input_booleans(hass: HomeAssistant) -> list[str]:
|
||||
"""Create multiple input_boolean entities associated with different targets."""
|
||||
return (await target_entities(hass, DOMAIN))["included"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"trigger_key",
|
||||
[
|
||||
"input_boolean.turned_off",
|
||||
"input_boolean.turned_on",
|
||||
],
|
||||
)
|
||||
async def test_input_boolean_triggers_gated_by_labs_flag(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, trigger_key: str
|
||||
) -> None:
|
||||
"""Test the input_boolean triggers are gated by the labs flag."""
|
||||
await arm_trigger(hass, trigger_key, None, {ATTR_LABEL_ID: "test_label"})
|
||||
assert (
|
||||
"Unnamed automation failed to setup triggers and has been disabled: Trigger "
|
||||
f"'{trigger_key}' requires the experimental 'New triggers and conditions' "
|
||||
"feature to be enabled in Home Assistant Labs settings (feature flag: "
|
||||
"'new_triggers_conditions')"
|
||||
) in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_off",
|
||||
target_states=[STATE_OFF],
|
||||
other_states=[STATE_ON],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_on",
|
||||
target_states=[STATE_ON],
|
||||
other_states=[STATE_OFF],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_input_boolean_state_trigger_behavior_any(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_input_booleans: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the input_boolean state trigger fires when any input_boolean state changes to a specific state."""
|
||||
other_entity_ids = set(target_input_booleans) - {entity_id}
|
||||
|
||||
# Set all input_booleans, including the tested one, to the initial state
|
||||
for eid in target_input_booleans:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
|
||||
# Check if changing other input_booleans also triggers
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == (entities_in_target - 1) * state["count"]
|
||||
service_calls.clear()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_off",
|
||||
target_states=[STATE_OFF],
|
||||
other_states=[STATE_ON],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_on",
|
||||
target_states=[STATE_ON],
|
||||
other_states=[STATE_OFF],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_input_boolean_state_trigger_behavior_first(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_input_booleans: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the input_boolean state trigger fires when the first input_boolean changes to a specific state."""
|
||||
other_entity_ids = set(target_input_booleans) - {entity_id}
|
||||
|
||||
# Set all input_booleans, including the tested one, to the initial state
|
||||
for eid in target_input_booleans:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {"behavior": "first"}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
|
||||
# Triggering other input_booleans should not cause the trigger to fire again
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_off",
|
||||
target_states=[STATE_OFF],
|
||||
other_states=[STATE_ON],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="input_boolean.turned_on",
|
||||
target_states=[STATE_ON],
|
||||
other_states=[STATE_OFF],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_input_boolean_state_trigger_behavior_last(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_input_booleans: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the input_boolean state trigger fires when the last input_boolean changes to a specific state."""
|
||||
other_entity_ids = set(target_input_booleans) - {entity_id}
|
||||
|
||||
# Set all input_booleans, including the tested one, to the initial state
|
||||
for eid in target_input_booleans:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {"behavior": "last"}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == 0
|
||||
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
@@ -82,6 +82,9 @@ class KNXTestKit:
|
||||
|
||||
async def patch_xknx_start():
|
||||
"""Patch `xknx.start` for unittests."""
|
||||
self.xknx.cemi_handler.send_telegram = AsyncMock(
|
||||
side_effect=self._outgoing_telegrams.append
|
||||
)
|
||||
# after XKNX.__init__() to not overwrite it by the config entry again
|
||||
# before StateUpdater starts to avoid slow down of tests
|
||||
self.xknx.rate_limit = 0
|
||||
@@ -115,18 +118,13 @@ class KNXTestKit:
|
||||
if add_entry_to_hass:
|
||||
self.mock_config_entry.add_to_hass(self.hass)
|
||||
|
||||
# capture outgoing telegrams for assertion instead of sending to socket
|
||||
# before l_data_confirmation would be awaited in xknx
|
||||
patch(
|
||||
"xknx.cemi.cemi_handler.CEMIHandler.send_telegram",
|
||||
side_effect=self._outgoing_telegrams.append,
|
||||
).start() # keep patched for the whole test run
|
||||
|
||||
knx_config = {DOMAIN: yaml_config or {}}
|
||||
with patch(
|
||||
with (
|
||||
patch(
|
||||
"xknx.xknx.knx_interface_factory",
|
||||
return_value=knx_ip_interface_mock(),
|
||||
side_effect=fish_xknx,
|
||||
),
|
||||
):
|
||||
state_updater_patcher = patch(
|
||||
"xknx.xknx.StateUpdater.register_remote_value"
|
||||
@@ -136,7 +134,7 @@ class KNXTestKit:
|
||||
|
||||
await async_setup_component(self.hass, DOMAIN, knx_config)
|
||||
await self.hass.async_block_till_done()
|
||||
# remove patch after setup so state_updater can be tested
|
||||
|
||||
state_updater_patcher.stop()
|
||||
|
||||
########################
|
||||
|
||||
@@ -66,7 +66,6 @@ from .mock_data import (
|
||||
MAP_DATA,
|
||||
MULTI_MAP_LIST,
|
||||
NETWORK_INFO_BY_DEVICE,
|
||||
Q7_B01_PROPS,
|
||||
ROBOROCK_RRUID,
|
||||
ROOM_MAPPING,
|
||||
SCENES,
|
||||
@@ -107,13 +106,6 @@ def create_zeo_trait() -> Mock:
|
||||
return zeo_trait
|
||||
|
||||
|
||||
def create_b01_q7_trait() -> Mock:
|
||||
"""Create B01 Q7 trait for B01 devices."""
|
||||
b01_trait = AsyncMock()
|
||||
b01_trait.query_values.return_value = Q7_B01_PROPS
|
||||
return b01_trait
|
||||
|
||||
|
||||
@pytest.fixture(name="bypass_api_client_fixture")
|
||||
def bypass_api_client_fixture() -> None:
|
||||
"""Skip calls to the API client."""
|
||||
@@ -340,8 +332,6 @@ def fake_devices_fixture() -> list[FakeDevice]:
|
||||
fake_device.zeo = create_zeo_trait()
|
||||
else:
|
||||
raise ValueError("Unknown A01 category in test HOME_DATA")
|
||||
elif device_data.pv == "B01":
|
||||
fake_device.b01_q7_properties = create_b01_q7_trait()
|
||||
else:
|
||||
raise ValueError("Unknown pv in test HOME_DATA")
|
||||
devices.append(fake_device)
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from PIL import Image
|
||||
from roborock.data import (
|
||||
B01Props,
|
||||
CleanRecord,
|
||||
CleanSummary,
|
||||
Consumable,
|
||||
@@ -16,7 +15,6 @@ from roborock.data import (
|
||||
S7Status,
|
||||
UserData,
|
||||
ValleyElectricityTimer,
|
||||
WorkStatusMapping,
|
||||
)
|
||||
from vacuum_map_parser_base.config.image_config import ImageConfig
|
||||
from vacuum_map_parser_base.map_data import ImageData
|
||||
@@ -532,239 +530,6 @@ HOME_DATA_RAW = {
},
],
},
{
"id": "q7_product_id",
"name": "Roborock Q7 Series",
"model": "roborock.vacuum.sc01",
"category": "robot.vacuum.cleaner",
"capability": 0,
"schema": [
{
"id": 101,
"name": "RPC Request",
"code": "rpc_request",
"mode": "rw",
"type": "RAW",
"property": "null",
},
{
"id": 102,
"name": "RPC Response",
"code": "rpc_response",
"mode": "rw",
"type": "RAW",
"property": "null",
},
{
"id": 120,
"name": "错误代码",
"code": "error_code",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 121,
"name": "设备状态",
"code": "state",
"mode": "ro",
"type": "VALUE",
"property": "null",
},
{
"id": 122,
"name": "设备电量",
"code": "battery",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 123,
"name": "吸力档位",
"code": "fan_power",
"mode": "rw",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 124,
"name": "拖地档位",
"code": "water_box_mode",
"mode": "rw",
"type": "RAW",
"property": "null",
},
{
"id": 125,
"name": "主刷寿命",
"code": "main_brush_life",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 126,
"name": "边刷寿命",
"code": "side_brush_life",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 127,
"name": "滤网寿命",
"code": "filter_life",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 135,
"name": "离线原因",
"code": "offline_status",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 136,
"name": "清洁次数",
"code": "clean_times",
"mode": "rw",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 137,
"name": "扫拖模式",
"code": "cleaning_preference",
"mode": "rw",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 138,
"name": "清洁任务类型",
"code": "clean_task_type",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 139,
"name": "返回基站类型",
"code": "back_type",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 141,
"name": "清洁进度",
"code": "cleaning_progress",
"mode": "ro",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 142,
"name": "窜货信息",
"code": "fc_state",
"mode": "ro",
"type": "RAW",
"property": "null",
},
{
"id": 201,
"name": "启动清洁任务",
"code": "start_clean_task",
"mode": "wo",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 202,
"name": "返回基站任务",
"code": "start_back_dock_task",
"mode": "wo",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 203,
"name": "启动基站任务",
"code": "start_dock_task",
"mode": "wo",
"type": "ENUM",
"property": '{"range": []}',
},
{
"id": 204,
"name": "暂停任务",
"code": "pause",
"mode": "wo",
"type": "RAW",
"property": "null",
},
{
"id": 205,
"name": "继续任务",
"code": "resume",
"mode": "wo",
"type": "RAW",
"property": "null",
},
{
"id": 206,
"name": "结束任务",
"code": "stop",
"mode": "wo",
"type": "RAW",
"property": "null",
},
{
"id": 10000,
"name": "request_cmd",
"code": "request_cmd",
"mode": "wo",
"type": "RAW",
"property": "null",
},
{
"id": 10001,
"name": "response_cmd",
"code": "response_cmd",
"mode": "ro",
"type": "RAW",
"property": "null",
},
{
"id": 10002,
"name": "request_map",
"code": "request_map",
"mode": "ro",
"type": "RAW",
"property": "null",
},
{
"id": 10003,
"name": "response_map",
"code": "response_map",
"mode": "ro",
"type": "RAW",
"property": "null",
},
{
"id": 10004,
"name": "event_report",
"code": "event_report",
"mode": "rw",
"type": "RAW",
"property": "null",
},
],
},
{
"id": "zeo_id",
"name": "Zeo One",
@@ -1186,45 +951,6 @@ HOME_DATA_RAW = {
"silentOtaSwitch": False,
"f": False,
},
{
"duid": "q7_duid",
"name": "Roborock Q7",
"localKey": "q7_local_key",
"productId": "q7_product_id",
"fv": "03.01.71",
"activeTime": 1749513705,
"timeZoneId": "Pacific/Auckland",
"iconUrl": "",
"share": True,
"shareTime": 1754789238,
"online": True,
"pv": "B01",
"tuyaMigrated": False,
"extra": '{"1749518432": "0", "1753581557": "0", "clean_finish": "{}"}',
"sn": "q7_sn",
"deviceStatus": {
"135": 0,
"120": 0,
"121": 8,
"122": 100,
"123": 4,
"124": 2,
"125": 77,
"126": 4294965348,
"127": 54,
"136": 1,
"137": 1,
"138": 0,
"139": 0,
"141": 0,
"142": 0,
},
"silentOtaSwitch": False,
"f": False,
"createTime": 1749513706,
"cid": "DE",
"shareType": "UNLIMITED_TIME",
},
{
"duid": "zeo_duid",
"name": "Zeo One",
@@ -1483,13 +1209,3 @@ SCENES = [
},
),
]

Q7_B01_PROPS = B01Props(
status=WorkStatusMapping.SWEEP_MOPING,
main_brush=5000,
side_brush=3000,
hypa=1500,
main_sensor=500,
mop_life=1200,
real_clean_time=3000,
)

@@ -1172,280 +1172,6 @@
]),
}),
}),
'**REDACTED-4**': dict({
'device': dict({
'activeTime': 1749513705,
'cid': 'DE',
'createTime': 1749513706,
'deviceStatus': dict({
'120': 0,
'121': 8,
'122': 100,
'123': 4,
'124': 2,
'125': 77,
'126': 4294965348,
'127': 54,
'135': 0,
'136': 1,
'137': 1,
'138': 0,
'139': 0,
'141': 0,
'142': 0,
}),
'duid': '**REDACTED**',
'extra': '{"1749518432": "0", "1753581557": "0", "clean_finish": "{}"}',
'f': False,
'fv': '03.01.71',
'iconUrl': '',
'localKey': '**REDACTED**',
'name': 'Roborock Q7',
'online': True,
'productId': 'q7_product_id',
'pv': 'B01',
'share': True,
'shareTime': 1754789238,
'shareType': 'UNLIMITED_TIME',
'silentOtaSwitch': False,
'sn': '**REDACTED**',
'timeZoneId': 'Pacific/Auckland',
'tuyaMigrated': False,
}),
'product': dict({
'capability': 0,
'category': 'robot.vacuum.cleaner',
'id': 'q7_product_id',
'model': 'roborock.vacuum.sc01',
'name': 'Roborock Q7 Series',
'schema': list([
dict({
'code': 'rpc_request',
'id': 101,
'mode': 'rw',
'name': 'RPC Request',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'rpc_response',
'id': 102,
'mode': 'rw',
'name': 'RPC Response',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'error_code',
'id': 120,
'mode': 'ro',
'name': '错误代码',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'state',
'id': 121,
'mode': 'ro',
'name': '设备状态',
'property': 'null',
'type': 'VALUE',
}),
dict({
'code': 'battery',
'id': 122,
'mode': 'ro',
'name': '设备电量',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'fan_power',
'id': 123,
'mode': 'rw',
'name': '吸力档位',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'water_box_mode',
'id': 124,
'mode': 'rw',
'name': '拖地档位',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'main_brush_life',
'id': 125,
'mode': 'ro',
'name': '主刷寿命',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'side_brush_life',
'id': 126,
'mode': 'ro',
'name': '边刷寿命',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'filter_life',
'id': 127,
'mode': 'ro',
'name': '滤网寿命',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'offline_status',
'id': 135,
'mode': 'ro',
'name': '离线原因',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'clean_times',
'id': 136,
'mode': 'rw',
'name': '清洁次数',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'cleaning_preference',
'id': 137,
'mode': 'rw',
'name': '扫拖模式',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'clean_task_type',
'id': 138,
'mode': 'ro',
'name': '清洁任务类型',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'back_type',
'id': 139,
'mode': 'ro',
'name': '返回基站类型',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'cleaning_progress',
'id': 141,
'mode': 'ro',
'name': '清洁进度',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'fc_state',
'id': 142,
'mode': 'ro',
'name': '窜货信息',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'start_clean_task',
'id': 201,
'mode': 'wo',
'name': '启动清洁任务',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'start_back_dock_task',
'id': 202,
'mode': 'wo',
'name': '返回基站任务',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'start_dock_task',
'id': 203,
'mode': 'wo',
'name': '启动基站任务',
'property': '{"range": []}',
'type': 'ENUM',
}),
dict({
'code': 'pause',
'id': 204,
'mode': 'wo',
'name': '暂停任务',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'resume',
'id': 205,
'mode': 'wo',
'name': '继续任务',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'stop',
'id': 206,
'mode': 'wo',
'name': '结束任务',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'request_cmd',
'id': 10000,
'mode': 'wo',
'name': 'request_cmd',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'response_cmd',
'id': 10001,
'mode': 'ro',
'name': 'response_cmd',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'request_map',
'id': 10002,
'mode': 'ro',
'name': 'request_map',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'response_map',
'id': 10003,
'mode': 'ro',
'name': 'response_map',
'property': 'null',
'type': 'RAW',
}),
dict({
'code': 'event_report',
'id': 10004,
'mode': 'rw',
'name': 'event_report',
'property': 'null',
'type': 'RAW',
}),
]),
}),
}),
}),
})
# ---

@@ -878,108 +878,5 @@
'last_updated': <ANY>,
'state': 'none',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Roborock Q7 Status',
'options': list([
'sleeping',
'waiting_for_orders',
'paused',
'docking',
'charging',
'sweep_moping',
'sweep_moping_2',
'moping',
'updating',
'mop_cleaning',
'mop_airdrying',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_status',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'sweep_moping',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Main brush time left',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_main_brush_time_left',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '216.666666666667',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Side brush time left',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_side_brush_time_left',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '150.0',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Filter time left',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_filter_time_left',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '125.0',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Sensor time left',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_sensor_time_left',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '21.6666666666667',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Mop life time left',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_mop_life_time_left',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '160.0',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Roborock Q7 Total cleaning time',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.roborock_q7_total_cleaning_time',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '50.0',
}),
])
# ---

@@ -233,7 +233,6 @@ async def test_stale_device(
"Roborock S7 2 Dock",
"Dyad Pro",
"Zeo One",
"Roborock Q7",
}
fake_devices.pop(0) # Remove one robot

@@ -247,7 +246,6 @@ async def test_stale_device(
"Roborock S7 2 Dock",
"Dyad Pro",
"Zeo One",
"Roborock Q7",
}


@@ -271,7 +269,6 @@ async def test_no_stale_device(
"Roborock S7 2 Dock",
"Dyad Pro",
"Zeo One",
"Roborock Q7",
}

await hass.config_entries.async_reload(mock_roborock_entry.entry_id)
@@ -286,7 +283,6 @@ async def test_no_stale_device(
"Roborock S7 2 Dock",
"Dyad Pro",
"Zeo One",
"Roborock Q7",
}


@@ -444,7 +440,6 @@ async def test_zeo_device_fails_setup(
"Roborock S7 2",
"Roborock S7 2 Dock",
"Dyad Pro",
"Roborock Q7",
# Zeo device is missing
}

@@ -481,5 +476,4 @@ async def test_dyad_device_fails_setup(
"Roborock S7 2 Dock",
# Dyad device is missing
"Zeo One",
"Roborock Q7",
}

@@ -28,7 +28,7 @@
'platform': 'tuya',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': <VacuumEntityFeature: 8984>,
'supported_features': <VacuumEntityFeature: 13080>,
'translation_key': None,
'unique_id': 'tuya.mwsaod7fa3gjyh6ids',
'unit_of_measurement': None,
@@ -38,7 +38,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Hoover',
'supported_features': <VacuumEntityFeature: 8984>,
'supported_features': <VacuumEntityFeature: 13080>,
}),
'context': <ANY>,
'entity_id': 'vacuum.hoover',