mirror of
https://github.com/home-assistant/core.git
synced 2026-01-04 22:28:06 +00:00
Compare commits
28 Commits
parametriz
...
rc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
42ea7ecbd6 | ||
|
|
d58d08c350 | ||
|
|
65a259b9df | ||
|
|
cbfbfbee13 | ||
|
|
e503b37ddc | ||
|
|
217eef39f3 | ||
|
|
dcdbce9b21 | ||
|
|
71db8fe185 | ||
|
|
9b96cb66d5 | ||
|
|
78bccbbbc2 | ||
|
|
b0a8f9575c | ||
|
|
61104a9970 | ||
|
|
8d13dbdd0c | ||
|
|
9afb41004e | ||
|
|
cdd542f6e6 | ||
|
|
f520686002 | ||
|
|
e4d09bb615 | ||
|
|
10f6ccf6cc | ||
|
|
d9fa67b16f | ||
|
|
cf228ae02b | ||
|
|
cb4d62ab9a | ||
|
|
d2f75aec04 | ||
|
|
a609fbc07b | ||
|
|
1b9c7ae0ac | ||
|
|
492f2117fb | ||
|
|
2346f83635 | ||
|
|
8925bfb182 | ||
|
|
8f2b1f0eff |
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -40,7 +40,7 @@ env:
|
||||
CACHE_VERSION: 2
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2026.2"
|
||||
HA_SHORT_VERSION: "2026.1"
|
||||
DEFAULT_PYTHON: "3.13.11"
|
||||
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
|
||||
# 10.3 is the oldest supported version
|
||||
|
||||
@@ -7,12 +7,11 @@ import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bsblan import BSBLANError, DaySchedule, DHWSchedule, TimeSlot
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -34,27 +33,28 @@ ATTR_SUNDAY_SLOTS = "sunday_slots"
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
|
||||
|
||||
|
||||
# Schema for a single time slot
|
||||
_SLOT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("start_time"): cv.time,
|
||||
vol.Required("end_time"): cv.time,
|
||||
}
|
||||
)
|
||||
def _parse_time_value(value: time | str) -> time:
|
||||
"""Parse a time value from either a time object or string.
|
||||
|
||||
Raises ServiceValidationError if the format is invalid.
|
||||
"""
|
||||
if isinstance(value, time):
|
||||
return value
|
||||
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
vol.Optional(ATTR_MONDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_TUESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_WEDNESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_THURSDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_FRIDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_SATURDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_SUNDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
}
|
||||
)
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
parts = value.split(":")
|
||||
return time(int(parts[0]), int(parts[1]))
|
||||
except (ValueError, IndexError):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_time_format",
|
||||
) from None
|
||||
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_time_format",
|
||||
)
|
||||
|
||||
|
||||
def _convert_time_slots_to_day_schedule(
|
||||
@@ -62,8 +62,8 @@ def _convert_time_slots_to_day_schedule(
|
||||
) -> DaySchedule | None:
|
||||
"""Convert list of time slot dicts to a DaySchedule object.
|
||||
|
||||
Example: [{"start_time": time(6, 0), "end_time": time(8, 0)},
|
||||
{"start_time": time(17, 0), "end_time": time(21, 0)}]
|
||||
Example: [{"start_time": "06:00", "end_time": "08:00"},
|
||||
{"start_time": "17:00", "end_time": "21:00"}]
|
||||
becomes: DaySchedule with two TimeSlot objects
|
||||
|
||||
None returns None (don't modify this day).
|
||||
@@ -77,27 +77,31 @@ def _convert_time_slots_to_day_schedule(
|
||||
|
||||
time_slots = []
|
||||
for slot in slots:
|
||||
start_time = slot["start_time"]
|
||||
end_time = slot["end_time"]
|
||||
start = slot.get("start_time")
|
||||
end = slot.get("end_time")
|
||||
|
||||
# Validate that end time is after start time
|
||||
if end_time <= start_time:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="end_time_before_start_time",
|
||||
translation_placeholders={
|
||||
"start_time": start_time.strftime("%H:%M"),
|
||||
"end_time": end_time.strftime("%H:%M"),
|
||||
},
|
||||
if start and end:
|
||||
start_time = _parse_time_value(start)
|
||||
end_time = _parse_time_value(end)
|
||||
|
||||
# Validate that end time is after start time
|
||||
if end_time <= start_time:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="end_time_before_start_time",
|
||||
translation_placeholders={
|
||||
"start_time": start_time.strftime("%H:%M"),
|
||||
"end_time": end_time.strftime("%H:%M"),
|
||||
},
|
||||
)
|
||||
|
||||
time_slots.append(TimeSlot(start=start_time, end=end_time))
|
||||
LOGGER.debug(
|
||||
"Created time slot: %s-%s",
|
||||
start_time.strftime("%H:%M"),
|
||||
end_time.strftime("%H:%M"),
|
||||
)
|
||||
|
||||
time_slots.append(TimeSlot(start=start_time, end=end_time))
|
||||
LOGGER.debug(
|
||||
"Created time slot: %s-%s",
|
||||
start_time.strftime("%H:%M"),
|
||||
end_time.strftime("%H:%M"),
|
||||
)
|
||||
|
||||
LOGGER.debug("Created DaySchedule with %d slots", len(time_slots))
|
||||
return DaySchedule(slots=time_slots)
|
||||
|
||||
@@ -210,5 +214,4 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
DOMAIN,
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE,
|
||||
set_hot_water_schedule,
|
||||
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.1"]
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2025.12.2"]
|
||||
}
|
||||
|
||||
@@ -19,9 +19,6 @@ from .coordinator import FeedReaderCoordinator
|
||||
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
ATTR_CONTENT = "content"
|
||||
ATTR_DESCRIPTION = "description"
|
||||
ATTR_LINK = "link"
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No custom actions are defined.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: missing test for uniqueness of feed URL.
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: missing data descriptions
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: No custom actions are defined.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: No authentication support.
|
||||
test-coverage:
|
||||
status: done
|
||||
comment: Can use freezer for skipping time instead
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: No discovery support.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: No discovery support.
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Each config entry, represents one service.
|
||||
entity-category: done
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: Matches no available event entity class.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: Only one entity per config entry.
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: done
|
||||
comment: Only one repair-issue for yaml-import defined.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Each config entry, represents one service.
|
||||
|
||||
# Platinum
|
||||
async-dependency:
|
||||
status: todo
|
||||
comment: feedparser lib is not async.
|
||||
inject-websession:
|
||||
status: todo
|
||||
comment: feedparser lib doesn't take a session as argument.
|
||||
strict-typing:
|
||||
status: todo
|
||||
comment: feedparser lib is not fully typed.
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyfirefly==0.1.10"]
|
||||
"requirements": ["pyfirefly==0.1.8"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["librehardwaremonitor-api==1.7.2"]
|
||||
"requirements": ["librehardwaremonitor-api==1.6.0"]
|
||||
}
|
||||
|
||||
@@ -114,72 +114,32 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
if user_input is not None:
|
||||
host = user_input[CONF_HOST]
|
||||
serial_number, api_versions, errors = await self._validate_host(host)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors=errors,
|
||||
# Handle API version info for error display; pass version info when available
|
||||
# or None when api_versions is None to avoid displaying version details
|
||||
description_placeholders={
|
||||
"api_version_is": api_versions.get("api_version_is") or "",
|
||||
"api_version_should": api_versions.get("api_version_should")
|
||||
or "",
|
||||
}
|
||||
if api_versions
|
||||
else None,
|
||||
)
|
||||
|
||||
await self.async_set_unique_id(serial_number, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"PoolDose {serial_number}",
|
||||
data={CONF_HOST: host},
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfigure to change the device host/IP for an existing entry."""
|
||||
if user_input is not None:
|
||||
host = user_input[CONF_HOST]
|
||||
serial_number, api_versions, errors = await self._validate_host(host)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors=errors,
|
||||
# Handle API version info for error display identical to other steps
|
||||
description_placeholders={
|
||||
"api_version_is": api_versions.get("api_version_is") or "",
|
||||
"api_version_should": api_versions.get("api_version_should")
|
||||
or "",
|
||||
}
|
||||
if api_versions
|
||||
else None,
|
||||
)
|
||||
|
||||
# Ensure new serial number matches the existing entry unique_id (serial number)
|
||||
if serial_number != self._get_reconfigure_entry().unique_id:
|
||||
return self.async_abort(reason="wrong_device")
|
||||
|
||||
# Update the existing config entry with the new host and schedule reload
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(), data_updates={CONF_HOST: host}
|
||||
host = user_input[CONF_HOST]
|
||||
serial_number, api_versions, errors = await self._validate_host(host)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors=errors,
|
||||
# Handle API version info for error display; pass version info when available
|
||||
# or None when api_versions is None to avoid displaying version details
|
||||
description_placeholders={
|
||||
"api_version_is": api_versions.get("api_version_is") or "",
|
||||
"api_version_should": api_versions.get("api_version_should") or "",
|
||||
}
|
||||
if api_versions
|
||||
else None,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
# Pre-fill with current host from the entry being reconfigured
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
SCHEMA_DEVICE, self._get_reconfigure_entry().data
|
||||
),
|
||||
await self.async_set_unique_id(serial_number, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"PoolDose {serial_number}",
|
||||
data={CONF_HOST: host},
|
||||
)
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
"""Diagnostics support for Pooldose."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import PooldoseConfigEntry
|
||||
|
||||
TO_REDACT = {
|
||||
"IP",
|
||||
"MAC",
|
||||
"WIFI_SSID",
|
||||
"AP_SSID",
|
||||
"SERIAL_NUMBER",
|
||||
"DEVICE_ID",
|
||||
"OWNERID",
|
||||
"NAME",
|
||||
"GROUPNAME",
|
||||
}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: PooldoseConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
return {
|
||||
"device_info": async_redact_data(coordinator.device_info, TO_REDACT),
|
||||
"data": coordinator.data,
|
||||
}
|
||||
@@ -41,7 +41,7 @@ rules:
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
@@ -60,7 +60,7 @@ rules:
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration does not provide repair issues, as it is designed for a single PoolDose device with a fixed configuration.
|
||||
|
||||
@@ -4,9 +4,7 @@
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"no_device_info": "Unable to retrieve device information",
|
||||
"no_serial_number": "No serial number found on the device",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"wrong_device": "The provided device does not match the configured device"
|
||||
"no_serial_number": "No serial number found on the device"
|
||||
},
|
||||
"error": {
|
||||
"api_not_set": "API version not found in device response. Device firmware may not be compatible with this integration.",
|
||||
@@ -22,14 +20,6 @@
|
||||
"description": "A PoolDose device was found on your network at {ip} with MAC address {mac}.\n\nDo you want to add {name} to Home Assistant?",
|
||||
"title": "Confirm DHCP discovered PoolDose device"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::pooldose::config::step::user::data_description::host%]"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
|
||||
@@ -19,7 +19,6 @@ from homeassistant.components.light import (
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
from .entity import (
|
||||
ReolinkChannelCoordinatorEntity,
|
||||
@@ -158,16 +157,16 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
|
||||
|
||||
@property
|
||||
def brightness(self) -> int | None:
|
||||
"""Return the brightness of this light between 1.255."""
|
||||
"""Return the brightness of this light between 0.255."""
|
||||
assert self.entity_description.get_brightness_fn is not None
|
||||
|
||||
bright_pct = self.entity_description.get_brightness_fn(
|
||||
self._host.api, self._channel
|
||||
)
|
||||
if not bright_pct:
|
||||
if bright_pct is None:
|
||||
return None
|
||||
|
||||
return color_util.value_to_brightness((1, 100), bright_pct)
|
||||
return round(255 * bright_pct / 100.0)
|
||||
|
||||
@property
|
||||
def color_temp_kelvin(self) -> int | None:
|
||||
@@ -190,7 +189,7 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
|
||||
if (
|
||||
brightness := kwargs.get(ATTR_BRIGHTNESS)
|
||||
) is not None and self.entity_description.set_brightness_fn is not None:
|
||||
brightness_pct = round(color_util.brightness_to_value((1, 100), brightness))
|
||||
brightness_pct = int(brightness / 255.0 * 100)
|
||||
await self.entity_description.set_brightness_fn(
|
||||
self._host.api, self._channel, brightness_pct
|
||||
)
|
||||
|
||||
@@ -552,7 +552,6 @@ class RoborockB01Q7UpdateCoordinator(RoborockDataUpdateCoordinatorB01):
|
||||
RoborockB01Props.CLEANING_TIME,
|
||||
RoborockB01Props.REAL_CLEAN_TIME,
|
||||
RoborockB01Props.HYPA,
|
||||
RoborockB01Props.WIND,
|
||||
]
|
||||
|
||||
async def _async_update_data(
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from roborock.data import RoborockStateCode, SCWindMapping, WorkStatusMapping
|
||||
from roborock.data import RoborockStateCode
|
||||
from roborock.exceptions import RoborockException
|
||||
from roborock.roborock_typing import RoborockCommand
|
||||
import voluptuous as vol
|
||||
@@ -24,12 +24,8 @@ from .const import (
|
||||
GET_VACUUM_CURRENT_POSITION_SERVICE_NAME,
|
||||
SET_VACUUM_GOTO_POSITION_SERVICE_NAME,
|
||||
)
|
||||
from .coordinator import (
|
||||
RoborockB01Q7UpdateCoordinator,
|
||||
RoborockConfigEntry,
|
||||
RoborockDataUpdateCoordinator,
|
||||
)
|
||||
from .entity import RoborockCoordinatedEntityB01, RoborockCoordinatedEntityV1
|
||||
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
|
||||
from .entity import RoborockCoordinatedEntityV1
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -61,20 +57,6 @@ STATE_CODE_TO_STATE = {
|
||||
RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline"
|
||||
}
|
||||
|
||||
Q7_STATE_CODE_TO_STATE = {
|
||||
WorkStatusMapping.SLEEPING: VacuumActivity.IDLE,
|
||||
WorkStatusMapping.WAITING_FOR_ORDERS: VacuumActivity.IDLE,
|
||||
WorkStatusMapping.PAUSED: VacuumActivity.PAUSED,
|
||||
WorkStatusMapping.DOCKING: VacuumActivity.RETURNING,
|
||||
WorkStatusMapping.CHARGING: VacuumActivity.DOCKED,
|
||||
WorkStatusMapping.SWEEP_MOPING: VacuumActivity.CLEANING,
|
||||
WorkStatusMapping.SWEEP_MOPING_2: VacuumActivity.CLEANING,
|
||||
WorkStatusMapping.MOPING: VacuumActivity.CLEANING,
|
||||
WorkStatusMapping.UPDATING: VacuumActivity.DOCKED,
|
||||
WorkStatusMapping.MOP_CLEANING: VacuumActivity.DOCKED,
|
||||
WorkStatusMapping.MOP_AIRDRYING: VacuumActivity.DOCKED,
|
||||
}
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@@ -87,11 +69,6 @@ async def async_setup_entry(
|
||||
async_add_entities(
|
||||
RoborockVacuum(coordinator) for coordinator in config_entry.runtime_data.v1
|
||||
)
|
||||
async_add_entities(
|
||||
RoborockQ7Vacuum(coordinator)
|
||||
for coordinator in config_entry.runtime_data.b01
|
||||
if isinstance(coordinator, RoborockB01Q7UpdateCoordinator)
|
||||
)
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
|
||||
platform.async_register_entity_service(
|
||||
@@ -264,149 +241,3 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
|
||||
"x": robot_position.x,
|
||||
"y": robot_position.y,
|
||||
}
|
||||
|
||||
|
||||
class RoborockQ7Vacuum(RoborockCoordinatedEntityB01, StateVacuumEntity):
|
||||
"""General Representation of a Roborock vacuum."""
|
||||
|
||||
_attr_icon = "mdi:robot-vacuum"
|
||||
_attr_supported_features = (
|
||||
VacuumEntityFeature.PAUSE
|
||||
| VacuumEntityFeature.STOP
|
||||
| VacuumEntityFeature.RETURN_HOME
|
||||
| VacuumEntityFeature.FAN_SPEED
|
||||
| VacuumEntityFeature.SEND_COMMAND
|
||||
| VacuumEntityFeature.LOCATE
|
||||
| VacuumEntityFeature.STATE
|
||||
| VacuumEntityFeature.START
|
||||
)
|
||||
_attr_translation_key = DOMAIN
|
||||
_attr_name = None
|
||||
coordinator: RoborockB01Q7UpdateCoordinator
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: RoborockB01Q7UpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize a vacuum."""
|
||||
StateVacuumEntity.__init__(self)
|
||||
RoborockCoordinatedEntityB01.__init__(
|
||||
self,
|
||||
coordinator.duid_slug,
|
||||
coordinator,
|
||||
)
|
||||
|
||||
@property
|
||||
def fan_speed_list(self) -> list[str]:
|
||||
"""Get the list of available fan speeds."""
|
||||
return SCWindMapping.keys()
|
||||
|
||||
@property
|
||||
def activity(self) -> VacuumActivity | None:
|
||||
"""Return the status of the vacuum cleaner."""
|
||||
if self.coordinator.data.status is not None:
|
||||
return Q7_STATE_CODE_TO_STATE.get(self.coordinator.data.status)
|
||||
return None
|
||||
|
||||
@property
|
||||
def fan_speed(self) -> str | None:
|
||||
"""Return the fan speed of the vacuum cleaner."""
|
||||
return self.coordinator.data.wind_name
|
||||
|
||||
async def async_start(self) -> None:
|
||||
"""Start the vacuum."""
|
||||
try:
|
||||
await self.coordinator.api.start_clean()
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "start_clean",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_pause(self) -> None:
|
||||
"""Pause the vacuum."""
|
||||
try:
|
||||
await self.coordinator.api.pause_clean()
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "pause_clean",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_stop(self, **kwargs: Any) -> None:
|
||||
"""Stop the vacuum."""
|
||||
try:
|
||||
await self.coordinator.api.stop_clean()
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "stop_clean",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_return_to_base(self, **kwargs: Any) -> None:
|
||||
"""Send vacuum back to base."""
|
||||
try:
|
||||
await self.coordinator.api.return_to_dock()
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "return_to_dock",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_locate(self, **kwargs: Any) -> None:
|
||||
"""Locate vacuum."""
|
||||
try:
|
||||
await self.coordinator.api.find_me()
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "find_me",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
|
||||
"""Set vacuum fan speed."""
|
||||
try:
|
||||
await self.coordinator.api.set_fan_speed(
|
||||
SCWindMapping.from_value(fan_speed)
|
||||
)
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "set_fan_speed",
|
||||
},
|
||||
) from err
|
||||
|
||||
async def async_send_command(
|
||||
self,
|
||||
command: str,
|
||||
params: dict[str, Any] | list[Any] | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Send a command to a vacuum cleaner."""
|
||||
try:
|
||||
await self.coordinator.api.send(command, params)
|
||||
except RoborockException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": command,
|
||||
},
|
||||
) from err
|
||||
|
||||
@@ -37,7 +37,6 @@ SONOS_RADIO = "radio"
|
||||
SONOS_SHARE = "share"
|
||||
SONOS_OTHER_ITEM = "other items"
|
||||
SONOS_AUDIO_BOOK = "audio book"
|
||||
SONOS_PODCAST = "podcast"
|
||||
|
||||
MEDIA_TYPE_DIRECTORY = MediaClass.DIRECTORY
|
||||
|
||||
@@ -67,7 +66,6 @@ SONOS_TO_MEDIA_CLASSES = {
|
||||
SONOS_COMPOSER: MediaClass.COMPOSER,
|
||||
SONOS_GENRE: MediaClass.GENRE,
|
||||
SONOS_PLAYLISTS: MediaClass.PLAYLIST,
|
||||
SONOS_PODCAST: MediaClass.PODCAST,
|
||||
SONOS_TRACKS: MediaClass.TRACK,
|
||||
SONOS_SHARE: MediaClass.DIRECTORY,
|
||||
"object.container": MediaClass.DIRECTORY,
|
||||
@@ -77,7 +75,6 @@ SONOS_TO_MEDIA_CLASSES = {
|
||||
"object.container.person.musicArtist": MediaClass.ARTIST,
|
||||
"object.container.playlistContainer.sameArtist": MediaClass.ARTIST,
|
||||
"object.container.playlistContainer": MediaClass.PLAYLIST,
|
||||
"object.container.podcast": MediaClass.PODCAST,
|
||||
"object.item": MediaClass.TRACK,
|
||||
"object.item.audioItem.musicTrack": MediaClass.TRACK,
|
||||
"object.item.audioItem.audioBroadcast": MediaClass.GENRE,
|
||||
@@ -91,7 +88,6 @@ SONOS_TO_MEDIA_TYPES = {
|
||||
SONOS_COMPOSER: MediaType.COMPOSER,
|
||||
SONOS_GENRE: MediaType.GENRE,
|
||||
SONOS_PLAYLISTS: MediaType.PLAYLIST,
|
||||
SONOS_PODCAST: MediaType.PODCAST,
|
||||
SONOS_TRACKS: MediaType.TRACK,
|
||||
"object.container": MEDIA_TYPE_DIRECTORY,
|
||||
"object.container.album.musicAlbum": MediaType.ALBUM,
|
||||
@@ -100,7 +96,6 @@ SONOS_TO_MEDIA_TYPES = {
|
||||
"object.container.person.musicArtist": MediaType.ARTIST,
|
||||
"object.container.playlistContainer.sameArtist": MediaType.ARTIST,
|
||||
"object.container.playlistContainer": MediaType.PLAYLIST,
|
||||
"object.container.podcast": MediaType.PODCAST,
|
||||
"object.item.audioItem.musicTrack": MediaType.TRACK,
|
||||
"object.item.audioItem.audioBook": MediaType.TRACK,
|
||||
}
|
||||
@@ -130,7 +125,6 @@ SONOS_TYPES_MAPPING = {
|
||||
"object.container.person.musicArtist": SONOS_ALBUM_ARTIST,
|
||||
"object.container.playlistContainer.sameArtist": SONOS_ARTIST,
|
||||
"object.container.playlistContainer": SONOS_PLAYLISTS,
|
||||
"object.container.podcast": SONOS_PODCAST,
|
||||
"object.item": SONOS_OTHER_ITEM,
|
||||
"object.item.audioItem.musicTrack": SONOS_TRACKS,
|
||||
"object.item.audioItem.audioBroadcast": SONOS_RADIO,
|
||||
@@ -155,7 +149,6 @@ PLAYABLE_MEDIA_TYPES = [
|
||||
MediaType.CONTRIBUTING_ARTIST,
|
||||
MediaType.GENRE,
|
||||
MediaType.PLAYLIST,
|
||||
MediaType.PODCAST,
|
||||
MediaType.TRACK,
|
||||
]
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
"quality_scale": "bronze",
|
||||
"requirements": [
|
||||
"defusedxml==0.7.1",
|
||||
"soco==0.30.14",
|
||||
"soco==0.30.13",
|
||||
"sonos-websocket==0.1.3"
|
||||
],
|
||||
"ssdp": [
|
||||
|
||||
@@ -958,23 +958,6 @@ class SonosSpeaker:
|
||||
# as those "invisible" speakers will bypass the single speaker check
|
||||
return
|
||||
|
||||
# Clear coordinator on speakers that are no longer in this group
|
||||
old_members = set(self.sonos_group[1:])
|
||||
new_members = set(sonos_group[1:])
|
||||
removed_members = old_members - new_members
|
||||
for removed_speaker in removed_members:
|
||||
# Only clear if this speaker was coordinated by self and in the same group
|
||||
if (
|
||||
removed_speaker.coordinator == self
|
||||
and removed_speaker.sonos_group is self.sonos_group
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Zone %s Cleared coordinator [%s] (removed from group)",
|
||||
removed_speaker.zone_name,
|
||||
self.zone_name,
|
||||
)
|
||||
removed_speaker.clear_coordinator()
|
||||
|
||||
self.coordinator = None
|
||||
self.sonos_group = sonos_group
|
||||
self.sonos_group_entities = sonos_group_entities
|
||||
@@ -1007,19 +990,6 @@ class SonosSpeaker:
|
||||
|
||||
return _async_handle_group_event(event)
|
||||
|
||||
@callback
|
||||
def clear_coordinator(self) -> None:
|
||||
"""Clear coordinator from speaker."""
|
||||
self.coordinator = None
|
||||
self.sonos_group = [self]
|
||||
entity_registry = er.async_get(self.hass)
|
||||
speaker_entity_id = cast(
|
||||
str,
|
||||
entity_registry.async_get_entity_id(MP_DOMAIN, DOMAIN, self.uid),
|
||||
)
|
||||
self.sonos_group_entities = [speaker_entity_id]
|
||||
self.async_write_entity_states()
|
||||
|
||||
@soco_error()
|
||||
def join(self, speakers: list[SonosSpeaker]) -> list[SonosSpeaker]:
|
||||
"""Form a group with other players."""
|
||||
@@ -1068,6 +1038,7 @@ class SonosSpeaker:
|
||||
if self.sonos_group == [self]:
|
||||
return
|
||||
self.soco.unjoin()
|
||||
self.coordinator = None
|
||||
|
||||
@staticmethod
|
||||
async def unjoin_multi(
|
||||
|
||||
@@ -7,14 +7,9 @@ from typing import Any
|
||||
from srpenergy.client import SrpEnergyClient
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_RECONFIGURE,
|
||||
SOURCE_USER,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ID, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from .const import CONF_IS_TOU, DOMAIN, LOGGER
|
||||
@@ -45,71 +40,52 @@ class SRPEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
@callback
|
||||
def _show_form(self, errors: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Show the form to the user."""
|
||||
LOGGER.debug("Show Form")
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_NAME, default=self.hass.config.location_name
|
||||
): str,
|
||||
vol.Required(CONF_ID): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Optional(CONF_IS_TOU, default=False): bool,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
LOGGER.debug("Config entry")
|
||||
errors: dict[str, str] = {}
|
||||
if user_input:
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except ValueError:
|
||||
# Thrown when the account id is malformed
|
||||
errors["base"] = "invalid_account"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_ID])
|
||||
if self.source == SOURCE_USER:
|
||||
self._abort_if_unique_id_configured()
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
if not user_input:
|
||||
return self._show_form(errors)
|
||||
|
||||
if self.source == SOURCE_USER:
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_NAME],
|
||||
data=user_input,
|
||||
)
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data=user_input,
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ID): (
|
||||
str
|
||||
if self.source == SOURCE_USER
|
||||
else self._get_reconfigure_entry().data[CONF_ID]
|
||||
),
|
||||
vol.Required(
|
||||
CONF_NAME, default=self.hass.config.location_name
|
||||
): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Optional(CONF_IS_TOU, default=False): bool,
|
||||
}
|
||||
),
|
||||
suggested_values=(
|
||||
user_input or self._get_reconfigure_entry().data
|
||||
if self.source == SOURCE_RECONFIGURE
|
||||
else None
|
||||
),
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except ValueError:
|
||||
# Thrown when the account id is malformed
|
||||
errors["base"] = "invalid_account"
|
||||
return self._show_form(errors)
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
return self._show_form(errors)
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration."""
|
||||
return await self.async_step_user()
|
||||
await self.async_set_unique_id(user_input[CONF_ID])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)
|
||||
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"error": {
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: todo
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
`test_user` initializes flow with `None` data
|
||||
`test_user` imports a fixture that already patches, but then patches again
|
||||
`test_user` doesn't continue the old flow but creates a second one
|
||||
`test_user` can be parametrized to test the false SSL part
|
||||
`test_user_2sa` directly initialized the flow with form data
|
||||
Flows should end in CREATE_ENTRY or ABORT
|
||||
dependency-transparency: done
|
||||
docs-actions: todo
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name:
|
||||
status: todo
|
||||
comment: button entities missing
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: |
|
||||
Handled by coordinator.
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
consts.py -> const.py
|
||||
fixture could be autospecced and also be combined with the config flow one
|
||||
Consider creating a fixture of the mock config entry
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: done
|
||||
comment: Could add serial number to camera device
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: todo
|
||||
comment: cameras and disks can be replaced and removed
|
||||
entity-category:
|
||||
status: todo
|
||||
comment: CPU load sounds like diagnostic data
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations:
|
||||
status: todo
|
||||
comment: button still has names, can use placeholders
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: todo
|
||||
comment: button still has icons
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: done
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: see dynamic-devices
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing:
|
||||
status: done
|
||||
comment: Would be nice if we can get rid of getattr
|
||||
@@ -19,7 +19,7 @@ from homeassistant.const import CONF_ADDRESS, CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
|
||||
from .const import CONF_HAS_AMBIENT, CONF_PROBE_COUNT, DOMAIN
|
||||
from .const import CONF_PROBE_COUNT, DOMAIN
|
||||
from .coordinator import LOGGER
|
||||
|
||||
_TIMEOUT = 10
|
||||
@@ -48,7 +48,6 @@ async def read_config_data(
|
||||
CONF_MODEL: info.name,
|
||||
CONF_ADDRESS: info.address,
|
||||
CONF_PROBE_COUNT: packet_a0.probe_count,
|
||||
CONF_HAS_AMBIENT: packet_a0.ambient,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -5,5 +5,4 @@ DOMAIN = "togrill"
|
||||
MAX_PROBE_COUNT = 6
|
||||
|
||||
CONF_PROBE_COUNT = "probe_count"
|
||||
CONF_HAS_AMBIENT = "has_ambient"
|
||||
CONF_VERSION = "version"
|
||||
|
||||
@@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ToGrillConfigEntry
|
||||
from .const import CONF_HAS_AMBIENT, CONF_PROBE_COUNT, MAX_PROBE_COUNT
|
||||
from .const import CONF_PROBE_COUNT, MAX_PROBE_COUNT
|
||||
from .coordinator import ToGrillCoordinator
|
||||
from .entity import ToGrillEntity
|
||||
|
||||
@@ -63,27 +63,6 @@ def _get_temperature_description(probe_number: int):
|
||||
)
|
||||
|
||||
|
||||
def _get_ambient_temperature(packet: Packet) -> StateType:
|
||||
"""Extract ambient temperature from packet.
|
||||
|
||||
The ambient temperature is the last value in the temperatures list
|
||||
when the device has an ambient sensor.
|
||||
"""
|
||||
assert isinstance(packet, PacketA1Notify)
|
||||
if not packet.temperatures:
|
||||
return None
|
||||
# Ambient is always the last temperature value
|
||||
temperature = packet.temperatures[-1]
|
||||
if temperature is None:
|
||||
return None
|
||||
return temperature
|
||||
|
||||
|
||||
def _ambient_supported(config: Mapping[str, Any]) -> bool:
|
||||
"""Check if ambient sensor is supported."""
|
||||
return config.get(CONF_HAS_AMBIENT, False)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS = (
|
||||
ToGrillSensorEntityDescription(
|
||||
key="battery",
|
||||
@@ -99,17 +78,6 @@ ENTITY_DESCRIPTIONS = (
|
||||
_get_temperature_description(probe_number)
|
||||
for probe_number in range(1, MAX_PROBE_COUNT + 1)
|
||||
],
|
||||
ToGrillSensorEntityDescription(
|
||||
key="ambient_temperature",
|
||||
translation_key="ambient_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
packet_type=PacketA1Notify.type,
|
||||
packet_extract=_get_ambient_temperature,
|
||||
entity_supported=_ambient_supported,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -98,11 +98,6 @@
|
||||
"well_done": "Well done"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"ambient_temperature": {
|
||||
"name": "Ambient temperature"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/totalconnect",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["total_connect_client"],
|
||||
"requirements": ["total-connect-client==2025.12.2"]
|
||||
"requirements": ["total-connect-client==2025.5"]
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from .const import CONF_EVENTS, DOMAIN
|
||||
from .coordinator import TraccarServerConfigEntry, TraccarServerCoordinator
|
||||
from .coordinator import TraccarServerCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -33,9 +33,7 @@ PLATFORMS: list[Platform] = [
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: TraccarServerConfigEntry
|
||||
) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Traccar Server from a config entry."""
|
||||
if CONF_API_TOKEN not in entry.data:
|
||||
raise ConfigEntryAuthFailed(
|
||||
@@ -63,7 +61,8 @@ async def async_setup_entry(
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
hass.data[DOMAIN][entry.entry_id] = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(async_reload_entry))
|
||||
@@ -87,16 +86,14 @@ async def async_setup_entry(
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: TraccarServerConfigEntry
|
||||
) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_reload_entry(
|
||||
hass: HomeAssistant, entry: TraccarServerConfigEntry
|
||||
) -> None:
|
||||
async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle an options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
@@ -13,11 +13,13 @@ from homeassistant.components.binary_sensor import (
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import TraccarServerConfigEntry, TraccarServerCoordinator
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TraccarServerCoordinator
|
||||
from .entity import TraccarServerEntity
|
||||
|
||||
|
||||
@@ -52,18 +54,18 @@ TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS: tuple[
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: TraccarServerConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up binary sensor entities."""
|
||||
coordinator = entry.runtime_data
|
||||
coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities(
|
||||
TraccarServerBinarySensor(
|
||||
coordinator=coordinator,
|
||||
device=device_entry["device"],
|
||||
device=entry["device"],
|
||||
description=description,
|
||||
)
|
||||
for device_entry in coordinator.data.values()
|
||||
for entry in coordinator.data.values()
|
||||
for description in TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -35,8 +35,6 @@ from .const import (
|
||||
)
|
||||
from .helpers import get_device, get_first_geofence, get_geofence_ids
|
||||
|
||||
type TraccarServerConfigEntry = ConfigEntry[TraccarServerCoordinator]
|
||||
|
||||
|
||||
class TraccarServerCoordinatorDataDevice(TypedDict):
|
||||
"""Traccar Server coordinator data."""
|
||||
@@ -53,12 +51,12 @@ type TraccarServerCoordinatorData = dict[int, TraccarServerCoordinatorDataDevice
|
||||
class TraccarServerCoordinator(DataUpdateCoordinator[TraccarServerCoordinatorData]):
|
||||
"""Class to manage fetching Traccar Server data."""
|
||||
|
||||
config_entry: TraccarServerConfigEntry
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: TraccarServerConfigEntry,
|
||||
config_entry: ConfigEntry,
|
||||
client: ApiClient,
|
||||
) -> None:
|
||||
"""Initialize global Traccar Server data updater."""
|
||||
|
||||
@@ -5,24 +5,25 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.device_tracker import TrackerEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_CATEGORY, ATTR_TRACCAR_ID, ATTR_TRACKER, DOMAIN
|
||||
from .coordinator import TraccarServerConfigEntry
|
||||
from .coordinator import TraccarServerCoordinator
|
||||
from .entity import TraccarServerEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: TraccarServerConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up device tracker entities."""
|
||||
coordinator = entry.runtime_data
|
||||
coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities(
|
||||
TraccarServerDeviceTracker(coordinator, device_entry["device"])
|
||||
for device_entry in coordinator.data.values()
|
||||
TraccarServerDeviceTracker(coordinator, entry["device"])
|
||||
for entry in coordinator.data.values()
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,11 +5,13 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import REDACTED, async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from .coordinator import TraccarServerConfigEntry, TraccarServerCoordinator
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TraccarServerCoordinator
|
||||
|
||||
KEYS_TO_REDACT = {
|
||||
"area", # This is the polygon area of a geofence
|
||||
@@ -37,10 +39,10 @@ def _entity_state(
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
config_entry: TraccarServerConfigEntry,
|
||||
config_entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = config_entry.runtime_data
|
||||
coordinator: TraccarServerCoordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
entities = er.async_entries_for_config_entry(
|
||||
@@ -69,11 +71,11 @@ async def async_get_config_entry_diagnostics(
|
||||
|
||||
async def async_get_device_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: TraccarServerConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
device: dr.DeviceEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return device diagnostics."""
|
||||
coordinator = entry.runtime_data
|
||||
coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
entities = er.async_entries_for_device(
|
||||
@@ -83,7 +85,6 @@ async def async_get_device_diagnostics(
|
||||
)
|
||||
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
return async_redact_data(
|
||||
{
|
||||
"subscription_status": coordinator.client.subscription_status,
|
||||
|
||||
@@ -14,12 +14,14 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfLength, UnitOfSpeed
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import TraccarServerConfigEntry, TraccarServerCoordinator
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TraccarServerCoordinator
|
||||
from .entity import TraccarServerEntity
|
||||
|
||||
|
||||
@@ -80,18 +82,18 @@ TRACCAR_SERVER_SENSOR_ENTITY_DESCRIPTIONS: tuple[
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: TraccarServerConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensor entities."""
|
||||
coordinator = entry.runtime_data
|
||||
coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities(
|
||||
TraccarServerSensor(
|
||||
coordinator=coordinator,
|
||||
device=device_entry["device"],
|
||||
device=entry["device"],
|
||||
description=description,
|
||||
)
|
||||
for device_entry in coordinator.data.values()
|
||||
for entry in coordinator.data.values()
|
||||
for description in TRACCAR_SERVER_SENSOR_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -39,7 +39,6 @@ from .const import (
|
||||
SERVER_UNAVAILABLE,
|
||||
SWITCH_KEY_MAP,
|
||||
TRACKER_HARDWARE_STATUS_UPDATED,
|
||||
TRACKER_HEALTH_OVERVIEW_UPDATED,
|
||||
TRACKER_POSITION_UPDATED,
|
||||
TRACKER_SWITCH_STATUS_UPDATED,
|
||||
TRACKER_WELLNESS_STATUS_UPDATED,
|
||||
@@ -65,7 +64,6 @@ class Trackables:
|
||||
tracker_details: dict
|
||||
hw_info: dict
|
||||
pos_report: dict
|
||||
health_overview: dict
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
@@ -116,11 +114,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TractiveConfigEntry) ->
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
# Send initial health overview data to sensors after platforms are set up
|
||||
for item in filtered_trackables:
|
||||
if item.health_overview:
|
||||
tractive.send_health_overview_update(item.health_overview)
|
||||
|
||||
async def cancel_listen_task(_: Event) -> None:
|
||||
await tractive.unsubscribe()
|
||||
|
||||
@@ -151,13 +144,9 @@ async def _generate_trackables(
|
||||
return None
|
||||
|
||||
tracker = client.tracker(trackable["device_id"])
|
||||
trackable_pet = client.trackable_object(trackable["_id"])
|
||||
|
||||
tracker_details, hw_info, pos_report, health_overview = await asyncio.gather(
|
||||
tracker.details(),
|
||||
tracker.hw_info(),
|
||||
tracker.pos_report(),
|
||||
trackable_pet.health_overview(),
|
||||
tracker_details, hw_info, pos_report = await asyncio.gather(
|
||||
tracker.details(), tracker.hw_info(), tracker.pos_report()
|
||||
)
|
||||
|
||||
if not tracker_details.get("_id"):
|
||||
@@ -165,9 +154,7 @@ async def _generate_trackables(
|
||||
f"Tractive API returns incomplete data for tracker {trackable['device_id']}",
|
||||
)
|
||||
|
||||
return Trackables(
|
||||
tracker, trackable, tracker_details, hw_info, pos_report, health_overview
|
||||
)
|
||||
return Trackables(tracker, trackable, tracker_details, hw_info, pos_report)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: TractiveConfigEntry) -> bool:
|
||||
@@ -239,9 +226,6 @@ class TractiveClient:
|
||||
if server_was_unavailable:
|
||||
_LOGGER.debug("Tractive is back online")
|
||||
server_was_unavailable = False
|
||||
if event["message"] == "health_overview":
|
||||
self.send_health_overview_update(event)
|
||||
continue
|
||||
if event["message"] == "wellness_overview":
|
||||
self._send_wellness_update(event)
|
||||
continue
|
||||
@@ -332,27 +316,6 @@ class TractiveClient:
|
||||
TRACKER_WELLNESS_STATUS_UPDATED, event["pet_id"], payload
|
||||
)
|
||||
|
||||
def send_health_overview_update(self, event: dict[str, Any]) -> None:
|
||||
"""Handle health_overview events from Tractive API."""
|
||||
# The health_overview response can be at root level or wrapped in 'content'
|
||||
# Handle both structures for compatibility
|
||||
data = event.get("content", event)
|
||||
|
||||
activity = data.get("activity", {})
|
||||
sleep = data.get("sleep", {})
|
||||
|
||||
payload = {
|
||||
ATTR_DAILY_GOAL: activity.get("minutesGoal"),
|
||||
ATTR_MINUTES_ACTIVE: activity.get("minutesActive"),
|
||||
ATTR_MINUTES_DAY_SLEEP: sleep.get("minutesDaySleep"),
|
||||
ATTR_MINUTES_NIGHT_SLEEP: sleep.get("minutesNightSleep"),
|
||||
# Calm minutes can be used as rest indicator
|
||||
ATTR_MINUTES_REST: sleep.get("minutesCalm"),
|
||||
}
|
||||
self._dispatch_tracker_event(
|
||||
TRACKER_HEALTH_OVERVIEW_UPDATED, data["petId"], payload
|
||||
)
|
||||
|
||||
def _send_position_update(self, event: dict[str, Any]) -> None:
|
||||
payload = {
|
||||
"latitude": event["position"]["latlong"][0],
|
||||
|
||||
@@ -28,7 +28,6 @@ TRACKER_HARDWARE_STATUS_UPDATED = f"{DOMAIN}_tracker_hardware_status_updated"
TRACKER_POSITION_UPDATED = f"{DOMAIN}_tracker_position_updated"
TRACKER_SWITCH_STATUS_UPDATED = f"{DOMAIN}_tracker_switch_updated"
TRACKER_WELLNESS_STATUS_UPDATED = f"{DOMAIN}_tracker_wellness_updated"
TRACKER_HEALTH_OVERVIEW_UPDATED = f"{DOMAIN}_tracker_health_overview_updated"

SERVER_UNAVAILABLE = f"{DOMAIN}_server_unavailable"

@@ -35,7 +35,6 @@ from .const import (
ATTR_SLEEP_LABEL,
ATTR_TRACKER_STATE,
TRACKER_HARDWARE_STATUS_UPDATED,
TRACKER_HEALTH_OVERVIEW_UPDATED,
TRACKER_WELLNESS_STATUS_UPDATED,
)
from .entity import TractiveEntity
@@ -116,14 +115,14 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
key=ATTR_MINUTES_ACTIVE,
translation_key="activity_time",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_REST,
translation_key="rest_time",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
@@ -137,20 +136,20 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
key=ATTR_DAILY_GOAL,
translation_key="daily_goal",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_DAY_SLEEP,
translation_key="minutes_day_sleep",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_NIGHT_SLEEP,
translation_key="minutes_night_sleep",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(

@@ -105,10 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo
try:
await controller.connect()
except VelbusConnectionFailed as error:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="connection_failed",
) from error
raise ConfigEntryNotReady("Cannot connect to Velbus") from error

task = hass.async_create_task(velbus_scan_task(controller, hass, entry.entry_id))
entry.runtime_data = VelbusData(controller=controller, scan_task=task)

@@ -65,7 +65,7 @@ class VelbusClimate(VelbusEntity, ClimateEntity):
)

@property
def current_temperature(self) -> float | None:
def current_temperature(self) -> int | None:
"""Return the current temperature."""
return self._channel.get_state()

@@ -66,7 +66,6 @@ class VelbusEntity(Entity):
self._channel.remove_on_status_update(self._on_update)

async def _on_update(self) -> None:
"""Handle status updates from the channel."""
self.async_write_ha_state()

@@ -81,13 +80,8 @@ def api_call[_T: VelbusEntity, **_P](
try:
await func(self, *args, **kwargs)
except OSError as exc:
entity_name = self.name if isinstance(self.name, str) else "Unknown"
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_call_failed",
translation_placeholders={
"entity": entity_name,
},
f"Could not execute {func.__name__} service for {self.name}"
) from exc

return cmd_wrapper

@@ -24,12 +24,10 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.color import brightness_to_value, value_to_brightness

from . import VelbusConfigEntry
from .entity import VelbusEntity, api_call

BRIGHTNESS_SCALE = (1, 100)
PARALLEL_UPDATES = 0

@@ -67,7 +65,7 @@ class VelbusLight(VelbusEntity, LightEntity):
@property
def brightness(self) -> int:
"""Return the brightness of the light."""
return value_to_brightness(BRIGHTNESS_SCALE, self._channel.get_dimmer_state())
return int((self._channel.get_dimmer_state() * 255) / 100)

@api_call
async def async_turn_on(self, **kwargs: Any) -> None:
@@ -77,10 +75,7 @@ class VelbusLight(VelbusEntity, LightEntity):
if kwargs[ATTR_BRIGHTNESS] == 0:
brightness = 0
else:
brightness = max(
1,
int(brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])),
)
brightness = max(int((kwargs[ATTR_BRIGHTNESS] * 100) / 255), 1)
attr, *args = (
"set_dimmer_state",
brightness,

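Illustration (not part of the diff): the two Velbus light variants above differ only in how they scale between the dimmer range (0..100) and Home Assistant brightness (0..255) — one delegates to the homeassistant.util.color helpers, the other spells the arithmetic out. A hand-rolled sketch of that arithmetic:

```python
# Hand-written versions of the manual conversions shown in the hunk above.

def dimmer_to_brightness(dimmer_state: int) -> int:
    # 0..100 (Velbus dimmer) -> 0..255 (HA brightness)
    return int((dimmer_state * 255) / 100)

def brightness_to_dimmer(brightness: int) -> int:
    # 0..255 (HA brightness) -> 1..100, clamped so a non-zero request never rounds to 0
    return max(int((brightness * 100) / 255), 1)

assert dimmer_to_brightness(100) == 255
assert dimmer_to_brightness(50) == 127   # truncation, a helper may round to 128 instead
assert brightness_to_dimmer(1) == 1      # clamped up from 0
assert brightness_to_dimmer(255) == 100
```
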
@@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2026.1.1"],
"requirements": ["velbus-aio==2025.12.0"],
"usb": [
{
"pid": "0B1B",

@@ -25,8 +25,8 @@ rules:
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
@@ -56,7 +56,7 @@ rules:
entity-device-class: todo
entity-disabled-by-default: done
entity-translations: todo
exception-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:

@@ -57,14 +57,8 @@
}
},
"exceptions": {
"api_call_failed": {
"message": "Action execute for {entity} failed."
},
"clear_cache_failed": {
"message": "Could not clear the Velbus cache: {error}"
},
"connection_failed": {
"message": "Could not connect to Velbus."
"message": "Could not cleat the Velbus cache: {error}"
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."

@@ -5,7 +5,9 @@ rules:
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
config-flow:
status: todo
comment: Missing data descriptions
dependency-transparency: done
docs-actions: done
docs-high-level-description: done

@@ -15,10 +15,6 @@
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::email%]"
},
"data_description": {
"password": "[%key:component::vesync::config::step::user::data_description::password%]",
"username": "[%key:component::vesync::config::step::user::data_description::username%]"
},
"description": "The VeSync integration needs to re-authenticate your account",
"title": "[%key:common::config_flow::title::reauth%]"
},
@@ -27,11 +23,6 @@
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::email%]"
},
"data_description": {
"password": "Password associated with your VeSync account",
"username": "Email address associated with your VeSync account"
},
"description": "Enter the account used in the vesync app. 2FA is not supported and must be disabled.",
"title": "Enter username and password"
}
}
@@ -115,9 +106,6 @@
}
},
"switch": {
"auto_off_config": {
"name": "Auto Off"
},
"child_lock": {
"name": "Child lock"
},

@@ -64,16 +64,6 @@ SENSOR_DESCRIPTIONS: Final[tuple[VeSyncSwitchEntityDescription, ...]] = (
on_fn=lambda device: device.toggle_child_lock(True),
off_fn=lambda device: device.toggle_child_lock(False),
),
VeSyncSwitchEntityDescription(
key="auto_off_config",
is_on=lambda device: device.state.automatic_stop_config,
exists_fn=(
lambda device: rgetattr(device, "state.automatic_stop_config") is not None
),
translation_key="auto_off_config",
on_fn=lambda device: device.toggle_automatic_stop(True),
off_fn=lambda device: device.toggle_automatic_stop(False),
),
)

@@ -16,8 +16,8 @@ if TYPE_CHECKING:

APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0.dev0"
MINOR_VERSION: Final = 1
PATCH_VERSION: Final = "0b2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

@@ -2412,8 +2412,10 @@ class Service:

__slots__ = [
"description_placeholders",
"domain",
"job",
"schema",
"service",
"supports_response",
]

@@ -19,8 +19,8 @@ def has_location(state: State) -> bool:
"""
return (
isinstance(state, State)
and isinstance(state.attributes.get(ATTR_LATITUDE), (float, int))
and isinstance(state.attributes.get(ATTR_LONGITUDE), (float, int))
and isinstance(state.attributes.get(ATTR_LATITUDE), float)
and isinstance(state.attributes.get(ATTR_LONGITUDE), float)
)

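Illustration (not part of the diff): the practical difference between the two `isinstance` variants in the `has_location` hunk above is whether integer-valued coordinates count as a location. A minimal sketch, with a plain dict standing in for `state.attributes`:

```python
attrs_float = {"latitude": 52.37, "longitude": 4.89}
attrs_int = {"latitude": 52, "longitude": 5}

def has_location_strict(attrs: dict) -> bool:
    # float-only check (one side of the diff)
    return isinstance(attrs.get("latitude"), float) and isinstance(
        attrs.get("longitude"), float
    )

def has_location_lenient(attrs: dict) -> bool:
    # (float, int) check (the other side of the diff)
    return isinstance(attrs.get("latitude"), (float, int)) and isinstance(
        attrs.get("longitude"), (float, int)
    )

assert has_location_strict(attrs_float) and has_location_lenient(attrs_float)
assert not has_location_strict(attrs_int) and has_location_lenient(attrs_int)
```
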
@@ -40,7 +40,7 @@ hass-nabucasa==1.7.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251229.0
home-assistant-intents==2026.1.1
home-assistant-intents==2025.12.2
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6
@@ -70,9 +70,9 @@ typing-extensions>=4.15.0,<5.0
ulid-transform==1.5.2
urllib3>=2.0
uv==0.9.17
voluptuous-openapi==0.3.0
voluptuous-openapi==0.2.0
voluptuous-serialize==2.7.0
voluptuous==0.16.0
voluptuous==0.15.2
webrtc-models==0.3.0
yarl==1.22.0
zeroconf==0.148.0

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2026.2.0.dev0"
version = "2026.1.0b2"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -76,9 +76,9 @@ dependencies = [
"ulid-transform==1.5.2",
"urllib3>=2.0",
"uv==0.9.17",
"voluptuous==0.16.0",
"voluptuous==0.15.2",
"voluptuous-serialize==2.7.0",
"voluptuous-openapi==0.3.0",
"voluptuous-openapi==0.2.0",
"yarl==1.22.0",
"webrtc-models==0.3.0",
"zeroconf==0.148.0",

6  requirements.txt  generated
@@ -27,7 +27,7 @@ ha-ffmpeg==3.2.2
hass-nabucasa==1.7.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-intents==2026.1.1
home-assistant-intents==2025.12.2
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6
@@ -54,9 +54,9 @@ typing-extensions>=4.15.0,<5.0
ulid-transform==1.5.2
urllib3>=2.0
uv==0.9.17
voluptuous-openapi==0.3.0
voluptuous-openapi==0.2.0
voluptuous-serialize==2.7.0
voluptuous==0.16.0
voluptuous==0.15.2
webrtc-models==0.3.0
yarl==1.22.0
zeroconf==0.148.0

12  requirements_all.txt  generated
@@ -1216,7 +1216,7 @@ holidays==0.84
home-assistant-frontend==20251229.0

# homeassistant.components.conversation
home-assistant-intents==2026.1.1
home-assistant-intents==2025.12.2

# homeassistant.components.gentex_homelink
homelink-integration-api==0.0.1
@@ -1391,7 +1391,7 @@ libpyfoscamcgi==0.0.9
libpyvivotek==0.6.1

# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.7.2
librehardwaremonitor-api==1.6.0

# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -2046,7 +2046,7 @@ pyfibaro==0.8.3
pyfido==2.1.2

# homeassistant.components.firefly_iii
pyfirefly==0.1.10
pyfirefly==0.1.8

# homeassistant.components.fireservicerota
pyfireservicerota==0.0.46
@@ -2887,7 +2887,7 @@ smart-meter-texas==0.5.5
snapcast==2.3.6

# homeassistant.components.sonos
soco==0.30.14
soco==0.30.13

# homeassistant.components.solaredge_local
solaredge-local==0.2.3
@@ -3039,7 +3039,7 @@ tololib==1.2.2
toonapi==0.3.0

# homeassistant.components.totalconnect
total-connect-client==2025.12.2
total-connect-client==2025.5

# homeassistant.components.tplink_lte
tp-connected==0.0.4
@@ -3122,7 +3122,7 @@ vegehub==0.1.26
vehicle==2.2.2

# homeassistant.components.velbus
velbus-aio==2026.1.1
velbus-aio==2025.12.0

# homeassistant.components.venstar
venstarcolortouch==0.21

12  requirements_test_all.txt  generated
@@ -1074,7 +1074,7 @@ holidays==0.84
home-assistant-frontend==20251229.0

# homeassistant.components.conversation
home-assistant-intents==2026.1.1
home-assistant-intents==2025.12.2

# homeassistant.components.gentex_homelink
homelink-integration-api==0.0.1
@@ -1222,7 +1222,7 @@ libpyfoscamcgi==0.0.9
libpyvivotek==0.6.1

# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.7.2
librehardwaremonitor-api==1.6.0

# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1732,7 +1732,7 @@ pyfibaro==0.8.3
pyfido==2.1.2

# homeassistant.components.firefly_iii
pyfirefly==0.1.10
pyfirefly==0.1.8

# homeassistant.components.fireservicerota
pyfireservicerota==0.0.46
@@ -2414,7 +2414,7 @@ smart-meter-texas==0.5.5
snapcast==2.3.6

# homeassistant.components.sonos
soco==0.30.14
soco==0.30.13

# homeassistant.components.solaredge
solaredge-web==0.0.1
@@ -2533,7 +2533,7 @@ tololib==1.2.2
toonapi==0.3.0

# homeassistant.components.totalconnect
total-connect-client==2025.12.2
total-connect-client==2025.5

# homeassistant.components.tplink_omada
tplink-omada-client==1.5.3
@@ -2607,7 +2607,7 @@ vegehub==0.1.26
vehicle==2.2.2

# homeassistant.components.velbus
velbus-aio==2026.1.1
velbus-aio==2025.12.0

# homeassistant.components.venstar
venstarcolortouch==0.21

@@ -359,6 +359,7 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
"fail2ban",
"familyhub",
"fastdotcom",
"feedreader",
"ffmpeg_motion",
"ffmpeg_noise",
"fibaro",
@@ -931,6 +932,7 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
"switchmate",
"syncthing",
"synology_chat",
"synology_dsm",
"synology_srm",
"syslog",
"system_bridge",

@@ -3,7 +3,7 @@
from collections.abc import Iterable
from enum import StrEnum
import itertools
from typing import Any, TypedDict
from typing import TypedDict

from homeassistant.const import (
ATTR_AREA_ID,
@@ -167,13 +167,12 @@ class StateDescription(TypedDict):
def parametrize_trigger_states(
*,
trigger: str,
trigger_options: dict[str, Any] | None = None,
target_states: list[str | None | tuple[str | None, dict]],
other_states: list[str | None | tuple[str | None, dict]],
additional_attributes: dict | None = None,
trigger_from_none: bool = True,
retrigger_on_target_state: bool = False,
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
) -> list[tuple[str, list[StateDescription]]]:
"""Parametrize states and expected service call counts.

The target_states and other_states iterables are either iterables of
@@ -190,7 +189,6 @@ def parametrize_trigger_states(
"""

additional_attributes = additional_attributes or {}
trigger_options = trigger_options or {}

def state_with_attributes(
state: str | None | tuple[str | None, dict], count: int
@@ -224,7 +222,6 @@ def parametrize_trigger_states(
# Initial state None
(
trigger,
trigger_options,
list(
itertools.chain.from_iterable(
(
@@ -243,7 +240,6 @@ def parametrize_trigger_states(
# Initial state different from target state
(
trigger,
trigger_options,
# other_state,
list(
itertools.chain.from_iterable(
@@ -261,7 +257,6 @@ def parametrize_trigger_states(
# Initial state same as target state
(
trigger,
trigger_options,
list(
itertools.chain.from_iterable(
(
@@ -281,7 +276,6 @@ def parametrize_trigger_states(
# Initial state unavailable / unknown
(
trigger,
trigger_options,
list(
itertools.chain.from_iterable(
(
@@ -297,7 +291,6 @@ def parametrize_trigger_states(
),
(
trigger,
trigger_options,
list(
itertools.chain.from_iterable(
(
@@ -318,7 +311,6 @@ def parametrize_trigger_states(
tests.append(
(
trigger,
trigger_options,
list(
itertools.chain.from_iterable(
(
@@ -346,7 +338,7 @@ def parametrize_trigger_states(
async def arm_trigger(
hass: HomeAssistant,
trigger: str,
trigger_options: dict[str, Any] | None,
trigger_options: dict | None,
trigger_target: dict,
) -> None:
"""Arm the specified trigger, call service test.automation when it triggers."""

@@ -1,7 +1,6 @@
|
||||
"""Test alarm control panel triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -76,7 +75,7 @@ async def test_alarm_control_panel_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("alarm_control_panel"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="alarm_control_panel.armed",
|
||||
@@ -151,7 +150,6 @@ async def test_alarm_control_panel_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the alarm control panel state trigger fires when any alarm control panel state changes to a specific state."""
|
||||
@@ -187,7 +185,7 @@ async def test_alarm_control_panel_state_trigger_behavior_any(
|
||||
parametrize_target_entities("alarm_control_panel"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="alarm_control_panel.armed",
|
||||
@@ -262,7 +260,6 @@ async def test_alarm_control_panel_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the alarm control panel state trigger fires when the first alarm control panel changes to a specific state."""
|
||||
@@ -297,7 +294,7 @@ async def test_alarm_control_panel_state_trigger_behavior_first(
|
||||
parametrize_target_entities("alarm_control_panel"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="alarm_control_panel.armed",
|
||||
@@ -372,7 +369,6 @@ async def test_alarm_control_panel_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the alarm_control_panel state trigger fires when the last alarm_control_panel changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test assist satellite triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -70,7 +69,7 @@ async def test_assist_satellite_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("assist_satellite"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="assist_satellite.idle",
|
||||
@@ -102,7 +101,6 @@ async def test_assist_satellite_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the assist satellite state trigger fires when any assist satellite state changes to a specific state."""
|
||||
@@ -138,7 +136,7 @@ async def test_assist_satellite_state_trigger_behavior_any(
|
||||
parametrize_target_entities("assist_satellite"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="assist_satellite.idle",
|
||||
@@ -170,7 +168,6 @@ async def test_assist_satellite_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the assist satellite state trigger fires when the first assist satellite changes to a specific state."""
|
||||
@@ -205,7 +202,7 @@ async def test_assist_satellite_state_trigger_behavior_first(
|
||||
parametrize_target_entities("assist_satellite"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="assist_satellite.idle",
|
||||
@@ -237,7 +234,6 @@ async def test_assist_satellite_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the assist_satellite state trigger fires when the last assist_satellite changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test binary sensor trigger."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -72,7 +71,7 @@ async def test_binary_sensor_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("binary_sensor"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="binary_sensor.occupancy_detected",
|
||||
@@ -98,7 +97,6 @@ async def test_binary_sensor_state_attribute_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the binary sensor state trigger fires when any binary sensor state changes to a specific state."""
|
||||
@@ -142,7 +140,7 @@ async def test_binary_sensor_state_attribute_trigger_behavior_any(
|
||||
parametrize_target_entities("binary_sensor"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="binary_sensor.occupancy_detected",
|
||||
@@ -168,7 +166,6 @@ async def test_binary_sensor_state_attribute_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the binary sensor state trigger fires when the first binary sensor state changes to a specific state."""
|
||||
@@ -211,7 +208,7 @@ async def test_binary_sensor_state_attribute_trigger_behavior_first(
|
||||
parametrize_target_entities("binary_sensor"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="binary_sensor.occupancy_detected",
|
||||
@@ -237,7 +234,6 @@ async def test_binary_sensor_state_attribute_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the binary sensor state trigger fires when the last binary sensor state changes to a specific state."""
|
||||
|
||||
@@ -6,7 +6,6 @@ from unittest.mock import MagicMock

from bsblan import BSBLANError, DaySchedule, TimeSlot
import pytest
import voluptuous as vol

from homeassistant.components.bsblan.const import DOMAIN
from homeassistant.components.bsblan.services import (
@@ -199,7 +198,9 @@ async def test_no_config_entry_for_device(
SERVICE_SET_HOT_WATER_SCHEDULE,
{
"device_id": device_entry.id,
"monday_slots": [{"start_time": time(6, 0), "end_time": time(8, 0)}],
"monday_slots": [
{"start_time": time(6, 0), "end_time": time(8, 0)},
],
},
blocking=True,
)
@@ -273,10 +274,14 @@ async def test_api_error(
[
(time(13, 0), time(11, 0), "end_time_before_start_time"),
("13:00", "11:00", "end_time_before_start_time"),
("invalid", "08:00", "invalid_time_format"),
("06:00", "not-a-time", "invalid_time_format"),
],
ids=[
"time_objects_end_before_start",
"strings_end_before_start",
"invalid_start_time_format",
"invalid_end_time_format",
],
)
async def test_time_validation_errors(
@@ -390,20 +395,22 @@ async def test_non_standard_time_types(
device_entry: dr.DeviceEntry,
) -> None:
"""Test service with non-standard time types raises error."""
# Test with integer time values - schema validation will reject these
with pytest.raises(vol.MultipleInvalid):
# Test with integer time values (shouldn't happen but need coverage)
with pytest.raises(ServiceValidationError) as exc_info:
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HOT_WATER_SCHEDULE,
{
"device_id": device_entry.id,
"monday_slots": [
{"start_time": 600, "end_time": 800},
{"start_time": 600, "end_time": 800},  # Non-standard types
],
},
blocking=True,
)

assert exc_info.value.translation_key == "invalid_time_format"

async def test_async_setup_services(
hass: HomeAssistant,

@@ -153,12 +153,37 @@ async def test_climate_trigger_validation(
)

def parametrize_climate_trigger_states(
*,
trigger: str,
trigger_options: dict | None = None,
target_states: list[str | None | tuple[str | None, dict]],
other_states: list[str | None | tuple[str | None, dict]],
additional_attributes: dict | None = None,
trigger_from_none: bool = True,
retrigger_on_target_state: bool = False,
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
"""Parametrize states and expected service call counts."""
trigger_options = trigger_options or {}
return [
(s[0], trigger_options, *s[1:])
for s in parametrize_trigger_states(
trigger=trigger,
target_states=target_states,
other_states=other_states,
additional_attributes=additional_attributes,
trigger_from_none=trigger_from_none,
retrigger_on_target_state=retrigger_on_target_state,
)
]

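Illustration (not part of the diff): on the branch that adds this wrapper, `parametrize_trigger_states` yields `(trigger, states)` pairs, and the wrapper only re-inserts a `trigger_options` element so the existing `("trigger", "trigger_options", "states")` parametrize signatures keep working. A minimal sketch of that re-shaping, with a placeholder standing in for the real state descriptions:

```python
def reshape(rows: list[tuple], trigger_options: dict | None = None) -> list[tuple]:
    # Same idea as the comprehension in the wrapper above.
    trigger_options = trigger_options or {}
    return [(row[0], trigger_options, *row[1:]) for row in rows]

rows = [("climate.turned_on", ["<state description>"])]  # placeholder payload
assert reshape(rows, {"above": 10}) == [
    ("climate.turned_on", {"above": 10}, ["<state description>"])
]
```
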
def parametrize_xxx_changed_trigger_states(
|
||||
trigger: str, attribute: str
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_changed triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={},
|
||||
target_states=[
|
||||
@@ -169,7 +194,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
other_states=[(HVACMode.AUTO, {attribute: None})],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_ABOVE: 10},
|
||||
target_states=[
|
||||
@@ -182,7 +207,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_BELOW: 90},
|
||||
target_states=[
|
||||
@@ -203,7 +228,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_crossed_threshold triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
|
||||
@@ -220,7 +245,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(HVACMode.AUTO, {attribute: 100}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
|
||||
@@ -237,7 +262,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(HVACMode.AUTO, {attribute: 60}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
|
||||
@@ -252,7 +277,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(HVACMode.AUTO, {attribute: 0}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
|
||||
@@ -278,18 +303,18 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.hvac_mode_changed",
|
||||
trigger_options={CONF_HVAC_MODE: [HVACMode.HEAT, HVACMode.COOL]},
|
||||
target_states=[HVACMode.HEAT, HVACMode.COOL],
|
||||
other_states=other_states([HVACMode.HEAT, HVACMode.COOL]),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_off",
|
||||
target_states=[HVACMode.OFF],
|
||||
other_states=other_states(HVACMode.OFF),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_on",
|
||||
target_states=[
|
||||
HVACMode.AUTO,
|
||||
@@ -375,17 +400,17 @@ async def test_climate_state_trigger_behavior_any(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"climate.target_temperature_crossed_threshold", ATTR_TEMPERATURE
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_cooling",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.COOLING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_drying",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.DRYING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_heating",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.HEATING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
@@ -438,18 +463,18 @@ async def test_climate_state_attribute_trigger_behavior_any(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.hvac_mode_changed",
|
||||
trigger_options={CONF_HVAC_MODE: [HVACMode.HEAT, HVACMode.COOL]},
|
||||
target_states=[HVACMode.HEAT, HVACMode.COOL],
|
||||
other_states=other_states([HVACMode.HEAT, HVACMode.COOL]),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_off",
|
||||
target_states=[HVACMode.OFF],
|
||||
other_states=other_states(HVACMode.OFF),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_on",
|
||||
target_states=[
|
||||
HVACMode.AUTO,
|
||||
@@ -524,17 +549,17 @@ async def test_climate_state_trigger_behavior_first(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"climate.target_temperature_crossed_threshold", ATTR_TEMPERATURE
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_cooling",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.COOLING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_drying",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.DRYING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_heating",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.HEATING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
@@ -588,18 +613,18 @@ async def test_climate_state_attribute_trigger_behavior_first(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.hvac_mode_changed",
|
||||
trigger_options={CONF_HVAC_MODE: [HVACMode.HEAT, HVACMode.COOL]},
|
||||
target_states=[HVACMode.HEAT, HVACMode.COOL],
|
||||
other_states=other_states([HVACMode.HEAT, HVACMode.COOL]),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_off",
|
||||
target_states=[HVACMode.OFF],
|
||||
other_states=other_states(HVACMode.OFF),
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.turned_on",
|
||||
target_states=[
|
||||
HVACMode.AUTO,
|
||||
@@ -673,17 +698,17 @@ async def test_climate_state_trigger_behavior_last(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"climate.target_temperature_crossed_threshold", ATTR_TEMPERATURE
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_cooling",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.COOLING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_drying",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.DRYING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_climate_trigger_states(
|
||||
trigger="climate.started_heating",
|
||||
target_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.HEATING})],
|
||||
other_states=[(HVACMode.AUTO, {ATTR_HVAC_ACTION: HVACAction.IDLE})],
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test device_tracker trigger."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -70,7 +69,7 @@ async def test_device_tracker_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
@@ -92,7 +91,6 @@ async def test_device_tracker_home_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when any device_tracker changes to a specific state."""
|
||||
@@ -128,7 +126,7 @@ async def test_device_tracker_home_trigger_behavior_any(
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
@@ -150,7 +148,6 @@ async def test_device_tracker_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when the first device_tracker changes to a specific state."""
|
||||
@@ -185,7 +182,7 @@ async def test_device_tracker_state_trigger_behavior_first(
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
@@ -207,7 +204,6 @@ async def test_device_tracker_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when the last device_tracker changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test fan trigger."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -66,7 +65,7 @@ async def test_fan_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("fan"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="fan.turned_on",
|
||||
@@ -88,7 +87,6 @@ async def test_fan_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the fan state trigger fires when any fan state changes to a specific state."""
|
||||
@@ -124,7 +122,7 @@ async def test_fan_state_trigger_behavior_any(
|
||||
parametrize_target_entities("fan"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="fan.turned_on",
|
||||
@@ -146,7 +144,6 @@ async def test_fan_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the fan state trigger fires when the first fan changes to a specific state."""
|
||||
@@ -181,7 +178,7 @@ async def test_fan_state_trigger_behavior_first(
|
||||
parametrize_target_entities("fan"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="fan.turned_on",
|
||||
@@ -203,7 +200,6 @@ async def test_fan_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the fan state trigger fires when the last fan changes to a specific state."""
|
||||
|
||||
@@ -82,12 +82,37 @@ async def test_humidifier_triggers_gated_by_labs_flag(
|
||||
) in caplog.text
|
||||
|
||||
|
||||
def parametrize_humidifier_trigger_states(
|
||||
*,
|
||||
trigger: str,
|
||||
trigger_options: dict | None = None,
|
||||
target_states: list[str | None | tuple[str | None, dict]],
|
||||
other_states: list[str | None | tuple[str | None, dict]],
|
||||
additional_attributes: dict | None = None,
|
||||
trigger_from_none: bool = True,
|
||||
retrigger_on_target_state: bool = False,
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts."""
|
||||
trigger_options = trigger_options or {}
|
||||
return [
|
||||
(s[0], trigger_options, *s[1:])
|
||||
for s in parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
target_states=target_states,
|
||||
other_states=other_states,
|
||||
additional_attributes=additional_attributes,
|
||||
trigger_from_none=trigger_from_none,
|
||||
retrigger_on_target_state=retrigger_on_target_state,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def parametrize_xxx_changed_trigger_states(
|
||||
trigger: str, attribute: str
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_changed triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={},
|
||||
target_states=[
|
||||
@@ -98,7 +123,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
other_states=[(STATE_ON, {attribute: None})],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_ABOVE: 10},
|
||||
target_states=[
|
||||
@@ -111,7 +136,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_BELOW: 90},
|
||||
target_states=[
|
||||
@@ -132,7 +157,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_crossed_threshold triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
|
||||
@@ -149,7 +174,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 100}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
|
||||
@@ -166,7 +191,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 60}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
|
||||
@@ -181,7 +206,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 0}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
|
||||
@@ -205,7 +230,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
parametrize_target_entities("humidifier"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="humidifier.turned_on",
|
||||
@@ -227,7 +252,6 @@ async def test_humidifier_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the humidifier state trigger fires when any humidifier state changes to a specific state."""
|
||||
@@ -271,12 +295,12 @@ async def test_humidifier_state_trigger_behavior_any(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"humidifier.current_humidity_crossed_threshold", ATTR_CURRENT_HUMIDITY
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_drying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.DRYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_humidifying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.HUMIDIFYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
@@ -327,7 +351,7 @@ async def test_humidifier_state_attribute_trigger_behavior_any(
|
||||
parametrize_target_entities("humidifier"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="humidifier.turned_on",
|
||||
@@ -349,7 +373,6 @@ async def test_humidifier_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the humidifier state trigger fires when the first humidifier changes to a specific state."""
|
||||
@@ -389,12 +412,12 @@ async def test_humidifier_state_trigger_behavior_first(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"humidifier.current_humidity_crossed_threshold", ATTR_CURRENT_HUMIDITY
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_drying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.DRYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_humidifying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.HUMIDIFYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
@@ -446,7 +469,7 @@ async def test_humidifier_state_attribute_trigger_behavior_first(
|
||||
parametrize_target_entities("humidifier"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="humidifier.turned_on",
|
||||
@@ -468,7 +491,6 @@ async def test_humidifier_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the humidifier state trigger fires when the last humidifier changes to a specific state."""
|
||||
@@ -507,12 +529,12 @@ async def test_humidifier_state_trigger_behavior_last(
|
||||
*parametrize_xxx_crossed_threshold_trigger_states(
|
||||
"humidifier.current_humidity_crossed_threshold", ATTR_CURRENT_HUMIDITY
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_drying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.DRYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_humidifier_trigger_states(
|
||||
trigger="humidifier.started_humidifying",
|
||||
target_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.HUMIDIFYING})],
|
||||
other_states=[(STATE_ON, {ATTR_ACTION: HumidifierAction.IDLE})],
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test lawn mower triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -70,7 +69,7 @@ async def test_lawn_mower_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("lawn_mower"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lawn_mower.docked",
|
||||
@@ -102,7 +101,6 @@ async def test_lawn_mower_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lawn mower state trigger fires when any lawn mower state changes to a specific state."""
|
||||
@@ -138,7 +136,7 @@ async def test_lawn_mower_state_trigger_behavior_any(
|
||||
parametrize_target_entities("lawn_mower"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lawn_mower.docked",
|
||||
@@ -170,7 +168,6 @@ async def test_lawn_mower_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lawn mower state trigger fires when the first lawn mower changes to a specific state."""
|
||||
@@ -205,7 +202,7 @@ async def test_lawn_mower_state_trigger_behavior_first(
|
||||
parametrize_target_entities("lawn_mower"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lawn_mower.docked",
|
||||
@@ -237,7 +234,6 @@ async def test_lawn_mower_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lawn_mower state trigger fires when the last lawn_mower changes to a specific state."""
|
||||
|
||||
@@ -76,12 +76,37 @@ async def test_light_triggers_gated_by_labs_flag(
|
||||
) in caplog.text
|
||||
|
||||
|
||||
def parametrize_light_trigger_states(
|
||||
*,
|
||||
trigger: str,
|
||||
trigger_options: dict | None = None,
|
||||
target_states: list[str | None | tuple[str | None, dict]],
|
||||
other_states: list[str | None | tuple[str | None, dict]],
|
||||
additional_attributes: dict | None = None,
|
||||
trigger_from_none: bool = True,
|
||||
retrigger_on_target_state: bool = False,
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts."""
|
||||
trigger_options = trigger_options or {}
|
||||
return [
|
||||
(s[0], trigger_options, *s[1:])
|
||||
for s in parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
target_states=target_states,
|
||||
other_states=other_states,
|
||||
additional_attributes=additional_attributes,
|
||||
trigger_from_none=trigger_from_none,
|
||||
retrigger_on_target_state=retrigger_on_target_state,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def parametrize_xxx_changed_trigger_states(
|
||||
trigger: str, attribute: str
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_changed triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
target_states=[
|
||||
(STATE_ON, {attribute: 0}),
|
||||
@@ -91,7 +116,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
other_states=[(STATE_ON, {attribute: None})],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_ABOVE: 10},
|
||||
target_states=[
|
||||
@@ -104,7 +129,7 @@ def parametrize_xxx_changed_trigger_states(
|
||||
],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_BELOW: 90},
|
||||
target_states=[
|
||||
@@ -125,7 +150,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
) -> list[tuple[str, dict[str, Any], list[StateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for xxx_crossed_threshold triggers."""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
|
||||
@@ -142,7 +167,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 100}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
|
||||
@@ -159,7 +184,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 60}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
|
||||
@@ -174,7 +199,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
(STATE_ON, {attribute: 0}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
*parametrize_light_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
|
||||
@@ -198,7 +223,7 @@ def parametrize_xxx_crossed_threshold_trigger_states(
|
||||
parametrize_target_entities("light"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="light.turned_on",
|
||||
@@ -220,7 +245,6 @@ async def test_light_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the light state trigger fires when any light state changes to a specific state."""
|
||||
@@ -310,7 +334,7 @@ async def test_light_state_attribute_trigger_behavior_any(
|
||||
parametrize_target_entities("light"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="light.turned_on",
|
||||
@@ -332,7 +356,6 @@ async def test_light_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the light state trigger fires when the first light changes to a specific state."""
|
||||
@@ -419,7 +442,7 @@ async def test_light_state_attribute_trigger_behavior_first(
|
||||
parametrize_target_entities("light"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="light.turned_on",
|
||||
@@ -441,7 +464,6 @@ async def test_light_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the light state trigger fires when the last light changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test lock triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -70,7 +69,7 @@ async def test_lock_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lock.jammed",
|
||||
@@ -102,7 +101,6 @@ async def test_lock_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lock state trigger fires when any lock state changes to a specific state."""
|
||||
@@ -138,7 +136,7 @@ async def test_lock_state_trigger_behavior_any(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lock.jammed",
|
||||
@@ -170,7 +168,6 @@ async def test_lock_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lock state trigger fires when the first lock changes to a specific state."""
|
||||
@@ -205,7 +202,7 @@ async def test_lock_state_trigger_behavior_first(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="lock.jammed",
|
||||
@@ -237,7 +234,6 @@ async def test_lock_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the lock state trigger fires when the last lock changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test media player trigger."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -66,7 +65,7 @@ async def test_media_player_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("media_player"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="media_player.stopped_playing",
|
||||
@@ -91,7 +90,6 @@ async def test_media_player_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the media player state trigger fires when any media player state changes to a specific state."""
|
||||
@@ -127,7 +125,7 @@ async def test_media_player_state_trigger_behavior_any(
|
||||
parametrize_target_entities("media_player"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="media_player.stopped_playing",
|
||||
@@ -152,7 +150,6 @@ async def test_media_player_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the media player state trigger fires when the first media player changes to a specific state."""
|
||||
@@ -187,7 +184,7 @@ async def test_media_player_state_trigger_behavior_first(
|
||||
parametrize_target_entities("media_player"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="media_player.stopped_playing",
|
||||
@@ -212,7 +209,6 @@ async def test_media_player_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the media player state trigger fires when the last media player changes to a specific state."""
|
||||
|
||||
@@ -1,245 +0,0 @@
|
||||
# serializer version: 1
|
||||
# name: test_entry_diagnostics
|
||||
dict({
|
||||
'data': dict({
|
||||
'binary_sensor': dict({
|
||||
'alarm_ofa_cl': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'alarm_ofa_orp': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'alarm_ofa_ph': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'flow_rate_alarm': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'orp_level_alarm': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'ph_level_alarm': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'pump_alarm': dict({
|
||||
'value': True,
|
||||
}),
|
||||
'relay_alarm': dict({
|
||||
'value': True,
|
||||
}),
|
||||
'relay_aux1': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'relay_aux2': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'relay_aux3': dict({
|
||||
'value': False,
|
||||
}),
|
||||
}),
|
||||
'number': dict({
|
||||
'cl_target': dict({
|
||||
'max': 65535,
|
||||
'min': 0,
|
||||
'step': 0.01,
|
||||
'unit': 'ppm',
|
||||
'value': 1,
|
||||
}),
|
||||
'ofa_cl_lower': dict({
|
||||
'max': 10,
|
||||
'min': 0,
|
||||
'step': 0.1,
|
||||
'unit': 'ppm',
|
||||
'value': 0.2,
|
||||
}),
|
||||
'ofa_cl_upper': dict({
|
||||
'max': 10,
|
||||
'min': 0,
|
||||
'step': 0.1,
|
||||
'unit': 'ppm',
|
||||
'value': 0.9,
|
||||
}),
|
||||
'ofa_orp_lower': dict({
|
||||
'max': 1000,
|
||||
'min': 0,
|
||||
'step': 1,
|
||||
'unit': 'mV',
|
||||
'value': 600,
|
||||
}),
|
||||
'ofa_orp_upper': dict({
|
||||
'max': 1000,
|
||||
'min': 0,
|
||||
'step': 1,
|
||||
'unit': 'mV',
|
||||
'value': 800,
|
||||
}),
|
||||
'ofa_ph_lower': dict({
|
||||
'max': 14,
|
||||
'min': 0,
|
||||
'step': 0.1,
|
||||
'unit': None,
|
||||
'value': 6,
|
||||
}),
|
||||
'ofa_ph_upper': dict({
|
||||
'max': 14,
|
||||
'min': 0,
|
||||
'step': 0.1,
|
||||
'unit': None,
|
||||
'value': 8,
|
||||
}),
|
||||
'orp_target': dict({
|
||||
'max': 850,
|
||||
'min': 400,
|
||||
'step': 1,
|
||||
'unit': 'mV',
|
||||
'value': 680,
|
||||
}),
|
||||
'ph_target': dict({
|
||||
'max': 8,
|
||||
'min': 6,
|
||||
'step': 0.1,
|
||||
'unit': None,
|
||||
'value': 6.5,
|
||||
}),
|
||||
}),
|
||||
'select': dict({
|
||||
'cl_type_dosing_method': dict({
|
||||
'value': 'timed',
|
||||
}),
|
||||
'cl_type_dosing_set': dict({
|
||||
'value': 'high',
|
||||
}),
|
||||
'flow_rate_unit': dict({
|
||||
'value': 'L/s',
|
||||
}),
|
||||
'orp_type_dosing_method': dict({
|
||||
'value': 'on_off',
|
||||
}),
|
||||
'orp_type_dosing_set': dict({
|
||||
'value': 'low',
|
||||
}),
|
||||
'ph_type_dosing_method': dict({
|
||||
'value': 'proportional',
|
||||
}),
|
||||
'ph_type_dosing_set': dict({
|
||||
'value': 'acid',
|
||||
}),
|
||||
'water_meter_unit': dict({
|
||||
'value': 'm3',
|
||||
}),
|
||||
}),
|
||||
'sensor': dict({
|
||||
'cl': dict({
|
||||
'unit': 'ppm',
|
||||
'value': 1.2,
|
||||
}),
|
||||
'cl_type_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'low',
|
||||
}),
|
||||
'flow_rate': dict({
|
||||
'unit': 'L/s',
|
||||
'value': 150,
|
||||
}),
|
||||
'ofa_orp_time': dict({
|
||||
'unit': 'min',
|
||||
'value': 0,
|
||||
}),
|
||||
'ofa_ph_time': dict({
|
||||
'unit': 'min',
|
||||
'value': 0,
|
||||
}),
|
||||
'orp': dict({
|
||||
'unit': 'mV',
|
||||
'value': 718,
|
||||
}),
|
||||
'orp_calibration_offset': dict({
|
||||
'unit': 'mV',
|
||||
'value': 0,
|
||||
}),
|
||||
'orp_calibration_slope': dict({
|
||||
'unit': 'mV',
|
||||
'value': 0.96,
|
||||
}),
|
||||
'orp_calibration_type': dict({
|
||||
'unit': None,
|
||||
'value': '1_point',
|
||||
}),
|
||||
'orp_type_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'low',
|
||||
}),
|
||||
'peristaltic_cl_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'off',
|
||||
}),
|
||||
'peristaltic_orp_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'proportional',
|
||||
}),
|
||||
'peristaltic_ph_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'proportional',
|
||||
}),
|
||||
'ph': dict({
|
||||
'unit': None,
|
||||
'value': 6.8,
|
||||
}),
|
||||
'ph_calibration_offset': dict({
|
||||
'unit': 'mV',
|
||||
'value': 8,
|
||||
}),
|
||||
'ph_calibration_slope': dict({
|
||||
'unit': 'mV',
|
||||
'value': 57.34,
|
||||
}),
|
||||
'ph_calibration_type': dict({
|
||||
'unit': None,
|
||||
'value': '2_points',
|
||||
}),
|
||||
'ph_type_dosing': dict({
|
||||
'unit': None,
|
||||
'value': 'alcalyne',
|
||||
}),
|
||||
'temperature': dict({
|
||||
'unit': '°C',
|
||||
'value': 25,
|
||||
}),
|
||||
'water_meter_total_permanent': dict({
|
||||
'unit': 'm3',
|
||||
'value': 12345.67,
|
||||
}),
|
||||
'water_meter_total_resettable': dict({
|
||||
'unit': 'm3',
|
||||
'value': 123.45,
|
||||
}),
|
||||
}),
|
||||
'switch': dict({
|
||||
'frequency_input': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'pause_dosing': dict({
|
||||
'value': False,
|
||||
}),
|
||||
'pump_monitoring': dict({
|
||||
'value': True,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'device_info': dict({
|
||||
'API_VERSION': 'v1/',
|
||||
'DEVICE_ID': '**REDACTED**',
|
||||
'FW_CODE': '539187',
|
||||
'FW_VERSION': '1.30',
|
||||
'GROUPNAME': '**REDACTED**',
|
||||
'IP': '**REDACTED**',
|
||||
'MAC': '',
|
||||
'MODEL': 'POOL DOSE',
|
||||
'MODEL_ID': 'PDPR1H1HAW100',
|
||||
'NAME': '**REDACTED**',
|
||||
'OWNERID': '**REDACTED**',
|
||||
'SERIAL_NUMBER': '**REDACTED**',
|
||||
'SW_VERSION': '2.10',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
@@ -1,10 +1,8 @@
|
||||
"""Test the PoolDose config flow."""
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.pooldose.const import DOMAIN
|
||||
@@ -16,7 +14,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .conftest import RequestStatus
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def test_full_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
|
||||
@@ -428,80 +426,3 @@ async def test_dhcp_preserves_existing_mac(
|
||||
assert entry.data[CONF_HOST] == "192.168.0.123" # IP was updated
|
||||
assert entry.data[CONF_MAC] == "existing11aabb" # MAC remains unchanged
|
||||
assert entry.data[CONF_MAC] != "different22ccdd" # Not updated to new MAC
|
||||
|
||||
|
||||
async def _start_reconfigure_flow(
|
||||
hass: HomeAssistant, mock_config_entry: MockConfigEntry, host_ip: str
|
||||
) -> Any:
|
||||
"""Initialize a reconfigure flow for PoolDose and submit new host."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass)
|
||||
|
||||
assert reconfigure_result["type"] is FlowResultType.FORM
|
||||
assert reconfigure_result["step_id"] == "reconfigure"
|
||||
|
||||
return await hass.config_entries.flow.async_configure(
|
||||
reconfigure_result["flow_id"], {CONF_HOST: host_ip}
|
||||
)
|
||||
|
||||
|
||||
async def test_reconfigure_flow_success(
|
||||
hass: HomeAssistant,
|
||||
mock_pooldose_client: AsyncMock,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
) -> None:
|
||||
"""Test successful reconfigure updates host and reloads entry."""
|
||||
# Ensure the mocked device returns the same serial number as the
|
||||
# config entry so the reconfigure flow matches the device
|
||||
mock_pooldose_client.device_info = {"SERIAL_NUMBER": mock_config_entry.unique_id}
|
||||
|
||||
result = await _start_reconfigure_flow(hass, mock_config_entry, "192.168.0.200")
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reconfigure_successful"
|
||||
|
||||
# Config entry should have updated host
|
||||
assert mock_config_entry.data.get(CONF_HOST) == "192.168.0.200"
|
||||
|
||||
freezer.tick(timedelta(seconds=5))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Config entry should have updated host
|
||||
entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id)
|
||||
assert entry is not None
|
||||
assert entry.data.get(CONF_HOST) == "192.168.0.200"
|
||||
|
||||
|
||||
async def test_reconfigure_flow_cannot_connect(
|
||||
hass: HomeAssistant,
|
||||
mock_pooldose_client: AsyncMock,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test reconfigure shows cannot_connect when device unreachable."""
|
||||
mock_pooldose_client.connect.return_value = RequestStatus.HOST_UNREACHABLE
|
||||
|
||||
result = await _start_reconfigure_flow(hass, mock_config_entry, "192.168.0.200")
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
|
||||
async def test_reconfigure_flow_wrong_device(
|
||||
hass: HomeAssistant,
|
||||
mock_pooldose_client: AsyncMock,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test reconfigure aborts when serial number doesn't match existing entry."""
|
||||
# Return device info with different serial number
|
||||
mock_pooldose_client.device_info = {"SERIAL_NUMBER": "OTHER123"}
|
||||
|
||||
result = await _start_reconfigure_flow(hass, mock_config_entry, "192.168.0.200")
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "wrong_device"
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
"""Test Pooldose diagnostics."""
|
||||
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.diagnostics import get_diagnostics_for_config_entry
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
async def test_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
init_integration: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test config entry diagnostics."""
|
||||
result = await get_diagnostics_for_config_entry(hass, hass_client, init_integration)
|
||||
assert result == snapshot
|
||||
@@ -185,7 +185,6 @@ def _init_host_mock(host_mock: MagicMock) -> None:
|
||||
host_mock.baichuan.smart_ai_type_list.return_value = ["people"]
|
||||
host_mock.baichuan.smart_ai_index.return_value = 1
|
||||
host_mock.baichuan.smart_ai_name.return_value = "zone1"
|
||||
host_mock.whiteled_brightness.return_value = None
|
||||
|
||||
def ai_detect_type(channel: int, object_type: str) -> str | None:
|
||||
if object_type == "people":
|
||||
|
||||
@@ -74,7 +74,6 @@ async def test_light_turn_off(
|
||||
) -> None:
|
||||
"""Test light turn off service."""
|
||||
reolink_host.whiteled_color_temperature.return_value = 3000
|
||||
reolink_host.whiteled_brightness.return_value = 75
|
||||
|
||||
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]):
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
@@ -82,8 +81,6 @@ async def test_light_turn_off(
|
||||
assert config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
entity_id = f"{Platform.LIGHT}.{TEST_CAM_NAME}_floodlight"
|
||||
state = hass.states.get(entity_id)
|
||||
assert state and state.attributes.get(ATTR_BRIGHTNESS) == 191
|
||||
|
||||
await hass.services.async_call(
|
||||
LIGHT_DOMAIN,
|
||||
@@ -110,7 +107,6 @@ async def test_light_turn_on(
|
||||
) -> None:
|
||||
"""Test light turn on service."""
|
||||
reolink_host.whiteled_color_temperature.return_value = 3000
|
||||
reolink_host.whiteled_brightness.return_value = None
|
||||
|
||||
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]):
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
|
||||
@@ -22,7 +22,6 @@ from roborock.data import (
|
||||
RoborockBase,
|
||||
RoborockDyadStateCode,
|
||||
ValleyElectricityTimer,
|
||||
WorkStatusMapping,
|
||||
ZeoError,
|
||||
ZeoState,
|
||||
)
|
||||
@@ -111,33 +110,7 @@ def create_zeo_trait() -> Mock:
|
||||
def create_b01_q7_trait() -> Mock:
|
||||
"""Create B01 Q7 trait for B01 devices."""
|
||||
b01_trait = AsyncMock()
|
||||
b01_trait._props_data = deepcopy(Q7_B01_PROPS)
|
||||
|
||||
async def query_values_side_effect(protocols):
|
||||
return b01_trait._props_data
|
||||
|
||||
b01_trait.query_values = AsyncMock(side_effect=query_values_side_effect)
|
||||
|
||||
# Add API methods that update the state when called
|
||||
async def start_clean_side_effect():
|
||||
b01_trait._props_data.status = WorkStatusMapping.SWEEP_MOPING
|
||||
|
||||
async def pause_clean_side_effect():
|
||||
b01_trait._props_data.status = WorkStatusMapping.PAUSED
|
||||
|
||||
async def stop_clean_side_effect():
|
||||
b01_trait._props_data.status = WorkStatusMapping.WAITING_FOR_ORDERS
|
||||
|
||||
async def return_to_dock_side_effect():
|
||||
b01_trait._props_data.status = WorkStatusMapping.DOCKING
|
||||
|
||||
b01_trait.start_clean = AsyncMock(side_effect=start_clean_side_effect)
|
||||
b01_trait.pause_clean = AsyncMock(side_effect=pause_clean_side_effect)
|
||||
b01_trait.stop_clean = AsyncMock(side_effect=stop_clean_side_effect)
|
||||
b01_trait.return_to_dock = AsyncMock(side_effect=return_to_dock_side_effect)
|
||||
b01_trait.find_me = AsyncMock()
|
||||
b01_trait.set_fan_speed = AsyncMock()
|
||||
b01_trait.send = AsyncMock()
|
||||
b01_trait.query_values.return_value = Q7_B01_PROPS
|
||||
return b01_trait
|
||||
|
||||
|
||||
@@ -175,20 +148,6 @@ class FakeDevice(RoborockDevice):
|
||||
"""Close the device."""
|
||||
|
||||
|
||||
def set_trait_attributes(
|
||||
trait: AsyncMock,
|
||||
dataclass_template: RoborockBase,
|
||||
init_none: bool = False,
|
||||
) -> None:
|
||||
"""Set attributes on a mock roborock trait."""
|
||||
template_copy = deepcopy(dataclass_template)
|
||||
for attr_name in dir(template_copy):
|
||||
if attr_name.startswith("_"):
|
||||
continue
|
||||
attr_value = getattr(template_copy, attr_name) if not init_none else None
|
||||
setattr(trait, attr_name, attr_value)
|
||||
|
||||
|
||||
def make_mock_trait(
|
||||
trait_spec: type[V1TraitMixin] | None = None,
|
||||
dataclass_template: RoborockBase | None = None,
|
||||
@@ -197,14 +156,12 @@ def make_mock_trait(
|
||||
trait = AsyncMock(spec=trait_spec or V1TraitMixin)
|
||||
if dataclass_template is not None:
|
||||
# Copy all attributes and property methods (e.g. computed properties)
|
||||
# on the first call to refresh(). The object starts uninitialized.
|
||||
set_trait_attributes(trait, dataclass_template, init_none=True)
|
||||
|
||||
async def refresh() -> None:
|
||||
if dataclass_template is not None:
|
||||
set_trait_attributes(trait, dataclass_template)
|
||||
|
||||
trait.refresh = AsyncMock(side_effect=refresh)
|
||||
template_copy = deepcopy(dataclass_template)
|
||||
for attr_name in dir(template_copy):
|
||||
if attr_name.startswith("_"):
|
||||
continue
|
||||
setattr(trait, attr_name, getattr(template_copy, attr_name))
|
||||
trait.refresh = AsyncMock()
|
||||
return trait
|
||||
|
||||
|
||||
|
||||
@@ -1,491 +0,0 @@
|
||||
# serializer version: 1
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_charging-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_charging',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.BATTERY_CHARGING: 'battery_charging'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Charging',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': 'battery_charging_device_2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_charging-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'battery_charging',
|
||||
'friendly_name': 'Roborock S7 2 Charging',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_charging',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_cleaning-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_cleaning',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.RUNNING: 'running'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Cleaning',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'in_cleaning',
|
||||
'unique_id': 'in_cleaning_device_2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_cleaning-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'running',
|
||||
'friendly_name': 'Roborock S7 2 Cleaning',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_cleaning',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_mop_attached-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_mop_attached',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.CONNECTIVITY: 'connectivity'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Mop attached',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'mop_attached',
|
||||
'unique_id': 'water_box_carriage_status_device_2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_mop_attached-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'connectivity',
|
||||
'friendly_name': 'Roborock S7 2 Mop attached',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_mop_attached',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_water_box_attached-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_water_box_attached',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.CONNECTIVITY: 'connectivity'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Water box attached',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'water_box_attached',
|
||||
'unique_id': 'water_box_status_device_2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_water_box_attached-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'connectivity',
|
||||
'friendly_name': 'Roborock S7 2 Water box attached',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_water_box_attached',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_water_shortage-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_water_shortage',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Water shortage',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'water_shortage',
|
||||
'unique_id': 'water_shortage_device_2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_2_water_shortage-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Roborock S7 2 Water shortage',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_2_water_shortage',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_charging-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_charging',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.BATTERY_CHARGING: 'battery_charging'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Charging',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': 'battery_charging_abc123',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_charging-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'battery_charging',
|
||||
'friendly_name': 'Roborock S7 MaxV Charging',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_charging',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_cleaning-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_cleaning',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.RUNNING: 'running'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Cleaning',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'in_cleaning',
|
||||
'unique_id': 'in_cleaning_abc123',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_cleaning-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'running',
|
||||
'friendly_name': 'Roborock S7 MaxV Cleaning',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_cleaning',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_mop_attached-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_mop_attached',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.CONNECTIVITY: 'connectivity'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Mop attached',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'mop_attached',
|
||||
'unique_id': 'water_box_carriage_status_abc123',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_mop_attached-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'connectivity',
|
||||
'friendly_name': 'Roborock S7 MaxV Mop attached',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_mop_attached',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_water_box_attached-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_water_box_attached',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.CONNECTIVITY: 'connectivity'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Water box attached',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'water_box_attached',
|
||||
'unique_id': 'water_box_status_abc123',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_water_box_attached-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'connectivity',
|
||||
'friendly_name': 'Roborock S7 MaxV Water box attached',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_water_box_attached',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_water_shortage-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_water_shortage',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Water shortage',
|
||||
'platform': 'roborock',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'water_shortage',
|
||||
'unique_id': 'water_shortage_abc123',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.roborock_s7_maxv_water_shortage-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Roborock S7 MaxV Water shortage',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.roborock_s7_maxv_water_shortage',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,13 +1,11 @@
|
||||
"""Test Roborock Binary Sensor."""
|
||||
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -17,10 +15,17 @@ def platforms() -> list[Platform]:
|
||||
|
||||
|
||||
async def test_binary_sensors(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_entry: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
hass: HomeAssistant, setup_entry: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test binary sensors and check test values are correctly set."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_entry.entry_id)
|
||||
assert len(hass.states.async_all("binary_sensor")) == 10
|
||||
assert hass.states.get("binary_sensor.roborock_s7_maxv_mop_attached").state == "on"
|
||||
assert (
|
||||
hass.states.get("binary_sensor.roborock_s7_maxv_water_box_attached").state
|
||||
== "on"
|
||||
)
|
||||
assert (
|
||||
hass.states.get("binary_sensor.roborock_s7_maxv_water_shortage").state == "off"
|
||||
)
|
||||
assert hass.states.get("binary_sensor.roborock_s7_maxv_cleaning").state == "off"
|
||||
assert hass.states.get("binary_sensor.roborock_s7_maxv_charging").state == "on"
|
||||
|
||||
@@ -5,9 +5,8 @@ from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -18,9 +17,8 @@ def platforms() -> list[Platform]:
|
||||
|
||||
async def test_sensors(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_entry: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test sensors and check test values are correctly set."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_entry.entry_id)
|
||||
assert snapshot == hass.states.async_all("sensor")
|
||||
|
||||
@@ -31,15 +31,12 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from .conftest import FakeDevice, set_trait_attributes
|
||||
from .mock_data import STATUS
|
||||
from .conftest import FakeDevice
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
ENTITY_ID = "vacuum.roborock_s7_maxv"
|
||||
DEVICE_ID = "abc123"
|
||||
Q7_ENTITY_ID = "vacuum.roborock_q7"
|
||||
Q7_DEVICE_ID = "q7_duid"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -135,14 +132,8 @@ async def test_resume_cleaning(
|
||||
vacuum_command: Mock,
|
||||
) -> None:
|
||||
"""Test resuming clean on start button when a clean is paused."""
|
||||
|
||||
async def refresh_properties() -> None:
|
||||
set_trait_attributes(fake_vacuum.v1_properties.status, STATUS)
|
||||
fake_vacuum.v1_properties.status.in_cleaning = in_cleaning_int
|
||||
fake_vacuum.v1_properties.status.in_returning = in_returning_int
|
||||
|
||||
fake_vacuum.v1_properties.status.refresh.side_effect = refresh_properties
|
||||
|
||||
fake_vacuum.v1_properties.status.in_cleaning = in_cleaning_int
|
||||
fake_vacuum.v1_properties.status.in_returning = in_returning_int
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
vacuum = hass.states.get(ENTITY_ID)
|
||||
assert vacuum
|
||||
@@ -279,220 +270,3 @@ async def test_get_current_position_no_robot_position(
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
|
||||
# Tests for RoborockQ7Vacuum
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fake_q7_vacuum(fake_devices: list[FakeDevice]) -> FakeDevice:
|
||||
"""Get the fake Q7 vacuum device."""
|
||||
# The Q7 is the fourth device in the list (index 3) based on HOME_DATA
|
||||
return fake_devices[3]
|
||||
|
||||
|
||||
@pytest.fixture(name="q7_vacuum_api", autouse=False)
|
||||
def fake_q7_vacuum_api_fixture(
|
||||
fake_q7_vacuum: FakeDevice,
|
||||
send_message_exception: Exception | None,
|
||||
) -> Mock:
|
||||
"""Get the fake Q7 vacuum device API for asserting that commands happened."""
|
||||
assert fake_q7_vacuum.b01_q7_properties is not None
|
||||
api = fake_q7_vacuum.b01_q7_properties
|
||||
if send_message_exception is not None:
|
||||
# For exception tests, override side effects to raise the exception
|
||||
api.start_clean.side_effect = send_message_exception
|
||||
api.pause_clean.side_effect = send_message_exception
|
||||
api.stop_clean.side_effect = send_message_exception
|
||||
api.return_to_dock.side_effect = send_message_exception
|
||||
api.find_me.side_effect = send_message_exception
|
||||
api.set_fan_speed.side_effect = send_message_exception
|
||||
api.send.side_effect = send_message_exception
|
||||
return api
|
||||
|
||||
|
||||
async def test_q7_registry_entries(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
setup_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Tests Q7 devices are registered in the entity registry."""
|
||||
entity_entry = entity_registry.async_get(Q7_ENTITY_ID)
|
||||
assert entity_entry.unique_id == Q7_DEVICE_ID
|
||||
|
||||
device_entry = device_registry.async_get(entity_entry.device_id)
|
||||
assert device_entry is not None
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("service", "api_method", "service_params", "expected_activity"),
|
||||
[
|
||||
(SERVICE_START, "start_clean", None, "cleaning"),
|
||||
(SERVICE_PAUSE, "pause_clean", None, "paused"),
|
||||
(SERVICE_STOP, "stop_clean", None, "idle"),
|
||||
(SERVICE_RETURN_TO_BASE, "return_to_dock", None, "returning"),
|
||||
],
|
||||
)
|
||||
async def test_q7_state_changing_commands(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
service: str,
|
||||
api_method: str,
|
||||
service_params: dict[str, Any] | None,
|
||||
expected_activity: str,
|
||||
q7_vacuum_api: Mock,
|
||||
fake_q7_vacuum: FakeDevice,
|
||||
) -> None:
|
||||
"""Test sending state-changing commands to the Q7 vacuum."""
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
|
||||
data = {ATTR_ENTITY_ID: Q7_ENTITY_ID, **(service_params or {})}
|
||||
await hass.services.async_call(
|
||||
Platform.VACUUM,
|
||||
service,
|
||||
data,
|
||||
blocking=True,
|
||||
)
|
||||
api_call = getattr(q7_vacuum_api, api_method)
|
||||
assert api_call.call_count == 1
|
||||
assert api_call.call_args[0] == ()
|
||||
|
||||
# Verify the entity state was updated
|
||||
assert fake_q7_vacuum.b01_q7_properties is not None
|
||||
# Force coordinator refresh to get updated state
|
||||
coordinator = setup_entry.runtime_data.b01[0]
|
||||
|
||||
await coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
assert vacuum.state == expected_activity
|
||||
|
||||
|
||||
async def test_q7_locate_command(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
q7_vacuum_api: Mock,
|
||||
) -> None:
|
||||
"""Test sending locate command to the Q7 vacuum."""
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
|
||||
await hass.services.async_call(
|
||||
Platform.VACUUM,
|
||||
SERVICE_LOCATE,
|
||||
{ATTR_ENTITY_ID: Q7_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
assert q7_vacuum_api.find_me.call_count == 1
|
||||
assert q7_vacuum_api.find_me.call_args[0] == ()
|
||||
|
||||
|
||||
async def test_q7_set_fan_speed_command(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
q7_vacuum_api: Mock,
|
||||
) -> None:
|
||||
"""Test sending set_fan_speed command to the Q7 vacuum."""
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
|
||||
await hass.services.async_call(
|
||||
Platform.VACUUM,
|
||||
SERVICE_SET_FAN_SPEED,
|
||||
{ATTR_ENTITY_ID: Q7_ENTITY_ID, "fan_speed": "quiet"},
|
||||
blocking=True,
|
||||
)
|
||||
assert q7_vacuum_api.set_fan_speed.call_count == 1
|
||||
# set_fan_speed is called with the fan speed value as first argument
|
||||
assert len(q7_vacuum_api.set_fan_speed.call_args[0]) == 1
|
||||
|
||||
|
||||
async def test_q7_send_command(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
q7_vacuum_api: Mock,
|
||||
) -> None:
|
||||
"""Test sending custom command to the Q7 vacuum."""
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
|
||||
await hass.services.async_call(
|
||||
Platform.VACUUM,
|
||||
SERVICE_SEND_COMMAND,
|
||||
{ATTR_ENTITY_ID: Q7_ENTITY_ID, "command": "test_command"},
|
||||
blocking=True,
|
||||
)
|
||||
assert q7_vacuum_api.send.call_count == 1
|
||||
# send is called with command as first argument and params as second
|
||||
assert q7_vacuum_api.send.call_args[0] == ("test_command", None)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("service", "api_method", "service_params"),
|
||||
[
|
||||
(SERVICE_START, "start_clean", None),
|
||||
(SERVICE_PAUSE, "pause_clean", None),
|
||||
(SERVICE_STOP, "stop_clean", None),
|
||||
(SERVICE_RETURN_TO_BASE, "return_to_dock", None),
|
||||
(SERVICE_LOCATE, "find_me", None),
|
||||
(SERVICE_SET_FAN_SPEED, "set_fan_speed", {"fan_speed": "quiet"}),
|
||||
(SERVICE_SEND_COMMAND, "send", {"command": "test_command"}),
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize("send_message_exception", [RoborockException()])
|
||||
async def test_q7_failed_commands(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
service: str,
|
||||
api_method: str,
|
||||
service_params: dict[str, Any] | None,
|
||||
q7_vacuum_api: Mock,
|
||||
) -> None:
|
||||
"""Test that when Q7 commands fail, we raise HomeAssistantError."""
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
# Store the original state to verify it doesn't change on error
|
||||
original_state = vacuum.state
|
||||
|
||||
data = {ATTR_ENTITY_ID: Q7_ENTITY_ID, **(service_params or {})}
|
||||
command_name = (
|
||||
service_params.get("command", api_method) if service_params else api_method
|
||||
)
|
||||
|
||||
with pytest.raises(HomeAssistantError, match=f"Error while calling {command_name}"):
|
||||
await hass.services.async_call(
|
||||
Platform.VACUUM,
|
||||
service,
|
||||
data,
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
# Verify the entity state remains unchanged after failed command
|
||||
await hass.async_block_till_done()
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
assert vacuum.state == original_state
|
||||
|
||||
|
||||
async def test_q7_activity_none_status(
|
||||
hass: HomeAssistant,
|
||||
setup_entry: MockConfigEntry,
|
||||
fake_q7_vacuum: FakeDevice,
|
||||
) -> None:
|
||||
"""Test that activity returns None when status is None."""
|
||||
assert fake_q7_vacuum.b01_q7_properties is not None
|
||||
# Set status to None
|
||||
fake_q7_vacuum.b01_q7_properties._props_data.status = None
|
||||
|
||||
# Force coordinator refresh to get updated state
|
||||
coordinator = setup_entry.runtime_data.b01[0]
|
||||
await coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify the entity state is unknown when status is None
|
||||
vacuum = hass.states.get(Q7_ENTITY_ID)
|
||||
assert vacuum
|
||||
assert vacuum.state == "unknown"
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test siren triggers."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
@@ -67,7 +66,7 @@ async def test_siren_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="siren.turned_off",
|
||||
@@ -89,7 +88,6 @@ async def test_siren_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the siren state trigger fires when any siren state changes to a specific state."""
|
||||
@@ -125,7 +123,7 @@ async def test_siren_state_trigger_behavior_any(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="siren.turned_off",
|
||||
@@ -147,7 +145,6 @@ async def test_siren_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the siren state trigger fires when the first siren changes to a specific state."""
|
||||
@@ -182,7 +179,7 @@ async def test_siren_state_trigger_behavior_first(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="siren.turned_off",
|
||||
@@ -204,7 +201,6 @@ async def test_siren_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the siren state trigger fires when the last siren changes to a specific state."""
|
||||
|
||||
@@ -11,23 +11,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Les P'tits Bateaux",
|
||||
"parent_id": "FV:2",
|
||||
"item_id": "FV:2/11",
|
||||
"album_art_uri": "https://www.radiofrance.fr/s3/cruiser-production/2023/05/67b86ee2-78c7-41a9-9d89-62c95e619036/1000x1000_sc_les-p-tits-bateaux.jpg",
|
||||
"resource_meta_data": "<DIDL-Lite xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:upnp=\"urn:schemas-upnp-org:metadata-1-0/upnp/\" xmlns:r=\"urn:schemas-rinconnetworks-com:metadata-1-0/\" xmlns=\"urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/\"><item id=\"10fe206cpodcast%3A3d680a63-ec3d-11e1-a7b7-782bcb76618d\" parentID=\"10fe206cpodcast%3A3d680a63-ec3d-11e1-a7b7-782bcb76618d\" restricted=\"true\"><dc:title>Les P'tits Bateaux</dc:title><upnp:class>object.container.podcast</upnp:class><desc id=\"cdudn\" nameSpace=\"urn:schemas-rinconnetworks-com:metadata-1-0/\">SA_RINCON149767_</desc></item></DIDL-Lite>",
|
||||
"resources": [
|
||||
{
|
||||
"uri": "x-rincon-cpcontainer:10fe206cpodcast%3A3d680a63-ec3d-11e1-a7b7-782bcb76618d?sid=585&flags=8300&sn=3",
|
||||
"protocol_info": "x-rincon-cpcontainer:*:*:*"
|
||||
}
|
||||
],
|
||||
"desc": null,
|
||||
"type": "instantPlay",
|
||||
"description": "Radio France",
|
||||
"favorite_nr": "10"
|
||||
},
|
||||
{
|
||||
"title": "James Taylor Radio",
|
||||
"parent_id": "FV:2",
|
||||
|
||||
@@ -27,17 +27,6 @@
|
||||
'thumbnail': None,
|
||||
'title': 'Playlists',
|
||||
}),
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'podcast',
|
||||
'media_content_id': 'object.container.podcast',
|
||||
'media_content_type': 'favorites_folder',
|
||||
'thumbnail': None,
|
||||
'title': 'Podcast',
|
||||
}),
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
@@ -97,33 +86,6 @@
|
||||
'title': 'Albums',
|
||||
})
|
||||
# ---
|
||||
# name: test_browse_media_favorites[object.container.podcast-favorites_folder]
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
dict({
|
||||
'can_expand': False,
|
||||
'can_play': True,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'podcast',
|
||||
'media_content_id': 'FV:2/11',
|
||||
'media_content_type': 'favorite_item_id',
|
||||
'thumbnail': 'https://www.radiofrance.fr/s3/cruiser-production/2023/05/67b86ee2-78c7-41a9-9d89-62c95e619036/1000x1000_sc_les-p-tits-bateaux.jpg',
|
||||
'title': "Les P'tits Bateaux",
|
||||
}),
|
||||
]),
|
||||
'children_media_class': 'podcast',
|
||||
'media_class': 'directory',
|
||||
'media_content_id': '',
|
||||
'media_content_type': 'favorites',
|
||||
'not_shown': 0,
|
||||
'thumbnail': None,
|
||||
'title': 'Podcast',
|
||||
})
|
||||
# ---
|
||||
# name: test_browse_media_favorites[object.item.audioItem.audioBook-favorites_folder]
|
||||
dict({
|
||||
'can_expand': True,
|
||||
|
||||
@@ -202,10 +202,6 @@ async def test_browse_media_library_albums(
|
||||
"object.container.album.musicAlbum",
|
||||
"favorites_folder",
|
||||
),
|
||||
(
|
||||
"object.container.podcast",
|
||||
"favorites_folder",
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_browse_media_favorites(
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .conftest import MockSoCo, create_zgs_sonos_event, group_speakers, ungroup_speakers
|
||||
from .conftest import MockSoCo, group_speakers, ungroup_speakers
|
||||
|
||||
|
||||
async def test_media_player_join(
|
||||
@@ -134,7 +134,6 @@ async def test_media_player_join_timeout(
|
||||
"Timeout while waiting for Sonos player to join the "
|
||||
"group Living Room: Living Room, Bedroom"
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.sonos.speaker.asyncio.timeout", instant_timeout
|
||||
@@ -248,65 +247,3 @@ async def test_media_player_unjoin_already_unjoined(
|
||||
# Should not have called unjoin, since the speakers are already unjoined.
|
||||
assert soco_bedroom.unjoin.call_count == 0
|
||||
assert soco_living_room.unjoin.call_count == 0
|
||||
|
||||
|
||||
async def test_unjoin_completes_when_coordinator_receives_event_first(
|
||||
hass: HomeAssistant,
|
||||
sonos_setup_two_speakers: list[MockSoCo],
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test that unjoin completes even when only coordinator receives ZGS event."""
|
||||
soco_living_room = sonos_setup_two_speakers[0]
|
||||
soco_bedroom = sonos_setup_two_speakers[1]
|
||||
|
||||
# First, group the speakers together
|
||||
group_speakers(soco_living_room, soco_bedroom)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
# Verify initial grouped state
|
||||
expected_group = ["media_player.living_room", "media_player.bedroom"]
|
||||
assert (
|
||||
hass.states.get("media_player.living_room").attributes["group_members"]
|
||||
== expected_group
|
||||
)
|
||||
assert (
|
||||
hass.states.get("media_player.bedroom").attributes["group_members"]
|
||||
== expected_group
|
||||
)
|
||||
|
||||
unjoin_complete_event = asyncio.Event()
|
||||
|
||||
def mock_unjoin(*args, **kwargs) -> None:
|
||||
hass.loop.call_soon_threadsafe(unjoin_complete_event.set)
|
||||
|
||||
soco_bedroom.unjoin = Mock(side_effect=mock_unjoin)
|
||||
|
||||
with caplog.at_level(logging.WARNING):
|
||||
caplog.clear()
|
||||
await hass.services.async_call(
|
||||
MP_DOMAIN,
|
||||
SERVICE_UNJOIN,
|
||||
{ATTR_ENTITY_ID: "media_player.bedroom"},
|
||||
blocking=False,
|
||||
)
|
||||
await unjoin_complete_event.wait()
|
||||
|
||||
# Fire ZGS event only to coordinator to test clearing of bedroom speaker
|
||||
ungroup_event = create_zgs_sonos_event(
|
||||
"zgs_two_single.xml",
|
||||
soco_living_room,
|
||||
soco_bedroom,
|
||||
create_uui_ds_in_group=False,
|
||||
)
|
||||
soco_living_room.zoneGroupTopology.subscribe.return_value._callback(
|
||||
ungroup_event
|
||||
)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
# Should complete without warnings or timeout errors
|
||||
assert len(caplog.records) == 0
|
||||
assert soco_bedroom.unjoin.call_count == 1
|
||||
state = hass.states.get("media_player.living_room")
|
||||
assert state.attributes["group_members"] == ["media_player.living_room"]
|
||||
state = hass.states.get("media_player.bedroom")
|
||||
assert state.attributes["group_members"] == ["media_player.bedroom"]
|
||||
|
||||
@@ -88,12 +88,3 @@ async def init_integration(
    await hass.async_block_till_done()

    return mock_config_entry


@pytest.fixture
def mock_setup_entry() -> Generator[MagicMock]:
    """Mock async_setup_entry."""
    with patch(
        "homeassistant.components.srp_energy.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        yield mock_setup_entry

@@ -6,13 +6,7 @@ import pytest

from homeassistant.components.srp_energy.const import CONF_IS_TOU, DOMAIN
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
from homeassistant.const import (
    CONF_ID,
    CONF_NAME,
    CONF_PASSWORD,
    CONF_SOURCE,
    CONF_USERNAME,
)
from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_SOURCE, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

@@ -178,111 +172,3 @@ async def test_flow_multiple_configs(
|
||||
entries = hass.config_entries.async_entries()
|
||||
domain_entries = [entry for entry in entries if entry.domain == DOMAIN]
|
||||
assert len(domain_entries) == 2
|
||||
|
||||
|
||||
async def test_reconfigure(
|
||||
hass: HomeAssistant, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test reconfiguring an existing entry."""
|
||||
|
||||
result = await init_integration.start_reconfigure_flow(hass)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ID: ACCNT_ID,
|
||||
CONF_NAME: ACCNT_NAME + "reconf",
|
||||
CONF_USERNAME: ACCNT_USERNAME + "reconf",
|
||||
CONF_PASSWORD: ACCNT_PASSWORD + "reconf",
|
||||
CONF_IS_TOU: not ACCNT_IS_TOU,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reconfigure_successful"
|
||||
assert init_integration.data == {
|
||||
CONF_ID: ACCNT_ID,
|
||||
CONF_NAME: ACCNT_NAME + "reconf",
|
||||
CONF_USERNAME: ACCNT_USERNAME + "reconf",
|
||||
CONF_PASSWORD: ACCNT_PASSWORD + "reconf",
|
||||
CONF_IS_TOU: not ACCNT_IS_TOU,
|
||||
}
|
||||
|
||||
|
||||
async def test_reconfigure_error(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
mock_srp_energy_config_flow: MagicMock,
|
||||
mock_setup_entry: MagicMock,
|
||||
) -> None:
|
||||
"""Test reconfiguring an existing entry."""
|
||||
|
||||
result = await init_integration.start_reconfigure_flow(hass)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
mock_srp_energy_config_flow.validate.side_effect = ValueError
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ID: ACCNT_ID,
|
||||
CONF_NAME: ACCNT_NAME + "reconf",
|
||||
CONF_USERNAME: ACCNT_USERNAME + "reconf",
|
||||
CONF_PASSWORD: ACCNT_PASSWORD + "reconf",
|
||||
CONF_IS_TOU: not ACCNT_IS_TOU,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "invalid_account"}
|
||||
|
||||
mock_srp_energy_config_flow.validate.side_effect = None
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ID: ACCNT_ID,
|
||||
CONF_NAME: ACCNT_NAME + "reconf",
|
||||
CONF_USERNAME: ACCNT_USERNAME + "reconf",
|
||||
CONF_PASSWORD: ACCNT_PASSWORD + "reconf",
|
||||
CONF_IS_TOU: not ACCNT_IS_TOU,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reconfigure_successful"
|
||||
|
||||
|
||||
async def test_reconfigure_unknown_error(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
mock_srp_energy_config_flow: MagicMock,
|
||||
mock_setup_entry: MagicMock,
|
||||
) -> None:
|
||||
"""Test reconfiguring an existing entry and handling unknown error."""
|
||||
|
||||
result = await init_integration.start_reconfigure_flow(hass)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
mock_srp_energy_config_flow.validate.side_effect = Exception
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ID: ACCNT_ID,
|
||||
CONF_NAME: ACCNT_NAME + "reconf",
|
||||
CONF_USERNAME: ACCNT_USERNAME + "reconf",
|
||||
CONF_PASSWORD: ACCNT_PASSWORD + "reconf",
|
||||
CONF_IS_TOU: not ACCNT_IS_TOU,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "unknown"
|
||||
|
||||
@@ -1,7 +1,6 @@
"""Test switch triggers."""

from collections.abc import Generator
from typing import Any
from unittest.mock import patch

import pytest
@@ -67,7 +66,7 @@ async def test_switch_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="switch.turned_off",
|
||||
@@ -89,7 +88,6 @@ async def test_switch_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the switch state trigger fires when any switch state changes to a specific state."""
|
||||
@@ -125,7 +123,7 @@ async def test_switch_state_trigger_behavior_any(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="switch.turned_off",
|
||||
@@ -147,7 +145,6 @@ async def test_switch_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the switch state trigger fires when the first switch changes to a specific state."""
|
||||
@@ -182,7 +179,7 @@ async def test_switch_state_trigger_behavior_first(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="switch.turned_off",
|
||||
@@ -204,7 +201,6 @@ async def test_switch_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the switch state trigger fires when the last switch changes to a specific state."""
@@ -7,11 +7,7 @@ import pytest
from togrill_bluetooth.client import Client
from togrill_bluetooth.packets import Packet, PacketA0Notify, PacketNotify

from homeassistant.components.togrill.const import (
    CONF_HAS_AMBIENT,
    CONF_PROBE_COUNT,
    DOMAIN,
)
from homeassistant.components.togrill.const import CONF_PROBE_COUNT, DOMAIN
from homeassistant.const import CONF_ADDRESS, CONF_MODEL

from . import TOGRILL_SERVICE_INFO
@@ -33,21 +29,6 @@ def mock_entry() -> MockConfigEntry:
    )


@pytest.fixture
def mock_entry_with_ambient() -> MockConfigEntry:
    """Create hass config fixture with ambient sensor enabled."""
    return MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_ADDRESS: TOGRILL_SERVICE_INFO.address,
            CONF_MODEL: "Pro-05",
            CONF_PROBE_COUNT: 2,
            CONF_HAS_AMBIENT: True,
        },
        unique_id=TOGRILL_SERVICE_INFO.address,
    )


@pytest.fixture(scope="module")
def mock_unload_entry() -> Generator[AsyncMock]:
    """Override async_unload_entry."""

File diff suppressed because it is too large
@@ -42,7 +42,6 @@ async def test_user_selection(
        "address": TOGRILL_SERVICE_INFO.address,
        "model": "Pro-05",
        "probe_count": 0,
        "has_ambient": False,
    }
    assert result["title"] == "Pro-05"
    assert result["result"].unique_id == TOGRILL_SERVICE_INFO.address

@@ -177,7 +176,6 @@ async def test_bluetooth(
        "address": TOGRILL_SERVICE_INFO.address,
        "model": "Pro-05",
        "probe_count": 0,
        "has_ambient": False,
    }
    assert result["title"] == "Pro-05"
    assert result["result"].unique_id == TOGRILL_SERVICE_INFO.address

@@ -70,50 +70,6 @@ async def test_setup(
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"packets",
|
||||
[
|
||||
pytest.param([], id="no_data"),
|
||||
pytest.param(
|
||||
[PacketA1Notify([10, 20, 25])],
|
||||
id="ambient_temp_data",
|
||||
),
|
||||
pytest.param(
|
||||
[PacketA1Notify([10, None, 25])],
|
||||
id="ambient_temp_with_missing_probe",
|
||||
),
|
||||
pytest.param(
|
||||
[PacketA1Notify([])],
|
||||
id="ambient_empty_temperatures",
|
||||
),
|
||||
pytest.param(
|
||||
[PacketA1Notify([10, 20, None])],
|
||||
id="ambient_temp_none",
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_setup_with_ambient(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
mock_entry_with_ambient: MockConfigEntry,
|
||||
mock_client: Mock,
|
||||
packets,
|
||||
) -> None:
|
||||
"""Test the sensors with ambient temperature sensor enabled."""
|
||||
|
||||
inject_bluetooth_service_info(hass, TOGRILL_SERVICE_INFO)
|
||||
|
||||
await setup_entry(hass, mock_entry_with_ambient, [Platform.SENSOR])
|
||||
|
||||
for packet in packets:
|
||||
mock_client.mocked_notify(packet)
|
||||
|
||||
await snapshot_platform(
|
||||
hass, entity_registry, snapshot, mock_entry_with_ambient.entry_id
|
||||
)
|
||||
|
||||
|
||||
async def test_device_disconnected(
|
||||
hass: HomeAssistant,
|
||||
mock_entry: MockConfigEntry,
|
||||
@@ -160,25 +116,3 @@ async def test_device_discovered(
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == "0"
|
||||
|
||||
|
||||
async def test_ambient_sensor_not_created_without_config(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
mock_entry: MockConfigEntry,
|
||||
mock_client: Mock,
|
||||
) -> None:
|
||||
"""Test ambient temperature sensor is not created when not configured."""
|
||||
inject_bluetooth_service_info(hass, TOGRILL_SERVICE_INFO)
|
||||
|
||||
await setup_entry(hass, mock_entry, [Platform.SENSOR])
|
||||
|
||||
entity_id = "sensor.pro_05_ambient_temperature"
|
||||
|
||||
# Entity should not exist when CONF_HAS_AMBIENT is not set
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is None
|
||||
|
||||
# Verify it's not in the entity registry
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry is None
|
||||
|
||||
@@ -346,9 +346,9 @@ async def test_instant_arming_exceptions(
        (ArmingState.ARMED_STAY_PROA7, AlarmControlPanelState.ARMED_HOME),
        (ArmingState.ARMED_STAY_BYPASS, AlarmControlPanelState.ARMED_HOME),
        (ArmingState.ARMED_STAY_BYPASS_PROA7, AlarmControlPanelState.ARMED_HOME),
        (ArmingState.ARMED_STAY_INSTANT, AlarmControlPanelState.ARMED_NIGHT),
        (ArmingState.ARMED_STAY_INSTANT, AlarmControlPanelState.ARMED_HOME),
        (ArmingState.ARMED_STAY_INSTANT_PROA7, AlarmControlPanelState.ARMED_HOME),
        (ArmingState.ARMED_STAY_INSTANT_BYPASS, AlarmControlPanelState.ARMED_NIGHT),
        (ArmingState.ARMED_STAY_INSTANT_BYPASS, AlarmControlPanelState.ARMED_HOME),
        (
            ArmingState.ARMED_STAY_INSTANT_BYPASS_PROA7,
            AlarmControlPanelState.ARMED_HOME,

@@ -52,22 +52,6 @@ def mock_tractive_client() -> Generator[AsyncMock]:
|
||||
}
|
||||
entry.runtime_data.client._send_wellness_update(event)
|
||||
|
||||
def send_health_overview_event(
|
||||
entry: MockConfigEntry, event: dict[str, Any] | None = None
|
||||
):
|
||||
"""Send health overview event."""
|
||||
if event is None:
|
||||
event = {
|
||||
"petId": "pet_id_123",
|
||||
"sleep": {
|
||||
"minutesDaySleep": 100,
|
||||
"minutesNightSleep": 300,
|
||||
"minutesCalm": 122,
|
||||
},
|
||||
"activity": {"minutesGoal": 200, "minutesActive": 150},
|
||||
}
|
||||
entry.runtime_data.client.send_health_overview_update(event)
|
||||
|
||||
def send_position_event(
|
||||
entry: MockConfigEntry, event: dict[str, Any] | None = None
|
||||
):
|
||||
@@ -128,24 +112,8 @@ def mock_tractive_client() -> Generator[AsyncMock]:
|
||||
set_led_active=AsyncMock(return_value={"pending": True}),
|
||||
)
|
||||
|
||||
client.trackable_object.return_value = Mock(
|
||||
spec=TrackableObject,
|
||||
health_overview=AsyncMock(
|
||||
return_value={
|
||||
"petId": "pet_id_123",
|
||||
"sleep": {
|
||||
"minutesDaySleep": 100,
|
||||
"minutesNightSleep": 300,
|
||||
"minutesCalm": 122,
|
||||
},
|
||||
"activity": {"minutesGoal": 200, "minutesActive": 150},
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
client.send_hardware_event = send_hardware_event
|
||||
client.send_wellness_event = send_wellness_event
|
||||
client.send_health_overview_event = send_health_overview_event
|
||||
client.send_position_event = send_position_event
|
||||
client.send_switch_event = send_switch_event
|
||||
client.send_server_unavailable_event = send_server_unavailable_event
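
The tractive conftest hunk above attaches plain helper functions (for example `send_health_overview_event`) to the mocked client so that individual tests can push server-style events on demand. A generic sketch of that wiring, with hypothetical names and no dependency on the Tractive library:

from typing import Any
from unittest.mock import AsyncMock


def make_mock_client() -> AsyncMock:
    """Return a mocked client that can fan fake events out to subscribers."""
    client = AsyncMock()
    listeners: list[Any] = []
    client.subscribe = listeners.append

    def send_event(event: dict[str, Any]) -> None:
        # Deliver the fake event to every callback the integration registered.
        for callback in listeners:
            callback(event)

    client.send_event = send_event
    return client
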
@@ -26,6 +26,5 @@ async def test_sensor(

    mock_tractive_client.send_hardware_event(mock_config_entry)
    mock_tractive_client.send_wellness_event(mock_config_entry)
    mock_tractive_client.send_health_overview_event(mock_config_entry)
    await hass.async_block_till_done()
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

@@ -1,7 +1,6 @@
"""Test update triggers."""

from collections.abc import Generator
from typing import Any
from unittest.mock import patch

import pytest
@@ -66,7 +65,7 @@ async def test_update_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="update.update_became_available",
|
||||
@@ -83,7 +82,6 @@ async def test_update_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the update state trigger fires when any update state changes to a specific state."""
|
||||
@@ -119,7 +117,7 @@ async def test_update_state_trigger_behavior_any(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="update.update_became_available",
|
||||
@@ -136,7 +134,6 @@ async def test_update_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the update state trigger fires when the first update changes to a specific state."""
|
||||
@@ -171,7 +168,7 @@ async def test_update_state_trigger_behavior_first(
|
||||
parametrize_target_entities(DOMAIN),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="update.update_became_available",
|
||||
@@ -188,7 +185,6 @@ async def test_update_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the update state trigger fires when the last update changes to a specific state."""
|
||||
|
||||
@@ -1,7 +1,6 @@
"""Test vacuum triggers."""

from collections.abc import Generator
from typing import Any
from unittest.mock import patch

import pytest
@@ -70,7 +69,7 @@ async def test_vacuum_triggers_gated_by_labs_flag(
|
||||
parametrize_target_entities("vacuum"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="vacuum.docked",
|
||||
@@ -102,7 +101,6 @@ async def test_vacuum_state_trigger_behavior_any(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the vacuum state trigger fires when any vacuum state changes to a specific state."""
|
||||
@@ -138,7 +136,7 @@ async def test_vacuum_state_trigger_behavior_any(
|
||||
parametrize_target_entities("vacuum"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="vacuum.docked",
|
||||
@@ -170,7 +168,6 @@ async def test_vacuum_state_trigger_behavior_first(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the vacuum state trigger fires when the first vacuum changes to a specific state."""
|
||||
@@ -205,7 +202,7 @@ async def test_vacuum_state_trigger_behavior_first(
|
||||
parametrize_target_entities("vacuum"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="vacuum.docked",
|
||||
@@ -237,7 +234,6 @@ async def test_vacuum_state_trigger_behavior_last(
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
trigger_options: dict[str, Any],
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the vacuum state trigger fires when the last vacuum changes to a specific state."""
|
||||
|
||||
@@ -341,30 +341,6 @@
|
||||
}),
|
||||
'unit_of_measurement': 'μg/m³',
|
||||
}),
|
||||
dict({
|
||||
'device_class': None,
|
||||
'disabled': False,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.test_fan_auto_off',
|
||||
'icon': None,
|
||||
'name': None,
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Auto Off',
|
||||
'state': dict({
|
||||
'attributes': dict({
|
||||
'friendly_name': 'Test Fan Auto Off',
|
||||
}),
|
||||
'entity_id': 'switch.test_fan_auto_off',
|
||||
'last_changed': str,
|
||||
'last_reported': str,
|
||||
'last_updated': str,
|
||||
'state': 'unavailable',
|
||||
}),
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
dict({
|
||||
'device_class': None,
|
||||
'disabled': False,
|
||||
|
||||
@@ -611,54 +611,8 @@
|
||||
'unique_id': '200s-humidifier4321-display',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.humidifier_200s_auto_off',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Auto Off',
|
||||
'platform': 'vesync',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'auto_off_config',
|
||||
'unique_id': '200s-humidifier4321-auto_off_config',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 200s][switch.humidifier_200s_auto_off]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Humidifier 200s Auto Off',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.humidifier_200s_auto_off',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 200s][switch.humidifier_200s_display]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
@@ -773,54 +727,8 @@
|
||||
'unique_id': '6000s-child_lock',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.humidifier_6000s_auto_off',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Auto Off',
|
||||
'platform': 'vesync',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'auto_off_config',
|
||||
'unique_id': '6000s-auto_off_config',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 6000s][switch.humidifier_6000s_auto_off]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Humidifier 6000s Auto Off',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.humidifier_6000s_auto_off',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 6000s][switch.humidifier_6000s_child_lock]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
@@ -915,54 +823,8 @@
|
||||
'unique_id': '600s-humidifier-display',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.humidifier_600s_auto_off',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Auto Off',
|
||||
'platform': 'vesync',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'auto_off_config',
|
||||
'unique_id': '600s-humidifier-auto_off_config',
|
||||
'unit_of_measurement': None,
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 600S][switch.humidifier_600s_auto_off]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Humidifier 600S Auto Off',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.humidifier_600s_auto_off',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_switch_state[Humidifier 600S][switch.humidifier_600s_display]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
|
||||
@@ -144,7 +144,7 @@ async def test_migrate_config_entry(
    switch_entities = [
        e for e in entity_registry.entities.values() if e.domain == "switch"
    ]
    assert len(switch_entities) == 3
    assert len(switch_entities) == 2

    humidifier_entities = [
        e for e in entity_registry.entities.values() if e.domain == "humidifier"

@@ -27,12 +27,6 @@ def test_has_location_with_states_with_valid_location() -> None:
    assert location.has_location(state)


def test_has_location_with_states_with_int_location() -> None:
    """Test that integer coordinates are valid."""
    state = State("hello.world", "valid", {ATTR_LATITUDE: 123, ATTR_LONGITUDE: 45})
    assert location.has_location(state)


def test_closest_with_no_states_with_location() -> None:
    """Set up the tests."""
    state = State("light.test", "on")