Mirror of https://github.com/home-assistant/core.git (synced 2025-12-08 08:58:05 +00:00)

Compare commits: device_tra...dev
27 Commits
| SHA1 |
|---|
| 2ac15ab67d |
| d599bb9553 |
| 92ee37017d |
| adf698d570 |
| 6ce9a13816 |
| 9cb9efeb88 |
| ca31134caa |
| 769578dc51 |
| 9dcabfe804 |
| dc6c23a58c |
| 6ec7efc2b8 |
| 97e5b7954e |
| 25505752b7 |
| 95a347dcf8 |
| 8c0f3014f7 |
| bb3cd3ebd3 |
| 319d6711c4 |
| ea3f76c315 |
| b892cc1cad |
| 3046c7afd8 |
| 73dc81034e |
| f306cde3b6 |
| 38c5e483a8 |
| ce14544ec1 |
| 87b9c3193e |
| 061c38d2a7 |
| e1720be5a4 |
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
+        uses: github/codeql-action/init@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
+        uses: github/codeql-action/analyze@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7
         with:
           category: "/language:python"
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant

 from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator

-PLATFORMS: list[Platform] = [Platform.CLIMATE]
+PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]


 async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
@@ -44,7 +44,7 @@ rules:
   discovery: done
   docs-data-update: done
   docs-examples: todo
-  docs-known-limitations: todo
+  docs-known-limitations: done
   docs-supported-devices: done
   docs-supported-functions: done
   docs-troubleshooting: done
@@ -54,7 +54,7 @@ rules:
     comment: Single device integration, no dynamic device discovery needed.
   entity-category: done
   entity-device-class: done
-  entity-disabled-by-default: todo
+  entity-disabled-by-default: done
   entity-translations: todo
   exception-translations: done
   icon-translations: todo
homeassistant/components/airobot/sensor.py (new file, 134 lines)

@@ -0,0 +1,134 @@
"""Sensor platform for Airobot thermostat."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from pyairobotrest.models import ThermostatStatus

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import (
    CONCENTRATION_PARTS_PER_MILLION,
    PERCENTAGE,
    EntityCategory,
    UnitOfTemperature,
    UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from . import AirobotConfigEntry
from .entity import AirobotEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirobotSensorEntityDescription(SensorEntityDescription):
    """Describes Airobot sensor entity."""

    value_fn: Callable[[ThermostatStatus], StateType]
    supported_fn: Callable[[ThermostatStatus], bool] = lambda _: True


SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
    AirobotSensorEntityDescription(
        key="air_temperature",
        translation_key="air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda status: status.temp_air,
    ),
    AirobotSensorEntityDescription(
        key="humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda status: status.hum_air,
    ),
    AirobotSensorEntityDescription(
        key="floor_temperature",
        translation_key="floor_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda status: status.temp_floor,
        supported_fn=lambda status: status.has_floor_sensor,
    ),
    AirobotSensorEntityDescription(
        key="co2",
        device_class=SensorDeviceClass.CO2,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda status: status.co2,
        supported_fn=lambda status: status.has_co2_sensor,
    ),
    AirobotSensorEntityDescription(
        key="air_quality_index",
        device_class=SensorDeviceClass.AQI,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda status: status.aqi,
        supported_fn=lambda status: status.has_co2_sensor,
    ),
    AirobotSensorEntityDescription(
        key="heating_uptime",
        translation_key="heating_uptime",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.HOURS,
        state_class=SensorStateClass.TOTAL_INCREASING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda status: status.heating_uptime,
        entity_registry_enabled_default=False,
    ),
    AirobotSensorEntityDescription(
        key="errors",
        translation_key="errors",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda status: status.errors,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AirobotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Airobot sensor platform."""
    coordinator = entry.runtime_data
    async_add_entities(
        AirobotSensor(coordinator, description)
        for description in SENSOR_TYPES
        if description.supported_fn(coordinator.data.status)
    )


class AirobotSensor(AirobotEntity, SensorEntity):
    """Representation of an Airobot sensor."""

    entity_description: AirobotSensorEntityDescription

    def __init__(
        self,
        coordinator,
        description: AirobotSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.coordinator.data.status)
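The `supported_fn` hook in the new sensor platform above gates entity creation on reported device capabilities at setup time. A minimal standalone sketch of that filtering pattern follows; the toy `Status` class and its fields are hypothetical stand-ins, not the pyairobotrest API.

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True)
class Status:
    """Toy stand-in for a thermostat status payload (hypothetical fields)."""

    temp_air: float = 21.5
    temp_floor: float | None = None
    has_floor_sensor: bool = False


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    """Describes one sensor: how to read it and when the device supports it."""

    key: str
    value_fn: Callable[[Status], float | None]
    supported_fn: Callable[[Status], bool] = lambda _: True


DESCRIPTIONS = (
    SensorDescription(key="air_temperature", value_fn=lambda s: s.temp_air),
    SensorDescription(
        key="floor_temperature",
        value_fn=lambda s: s.temp_floor,
        supported_fn=lambda s: s.has_floor_sensor,  # created only if the probe exists
    ),
)


def build_entities(status: Status) -> list[str]:
    """Return the description keys that would become entities for this device."""
    return [d.key for d in DESCRIPTIONS if d.supported_fn(status)]


print(build_entities(Status()))                       # ['air_temperature']
print(build_entities(Status(has_floor_sensor=True)))  # ['air_temperature', 'floor_temperature']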
@@ -43,6 +43,25 @@
       }
     }
   },
+  "entity": {
+    "sensor": {
+      "air_temperature": {
+        "name": "Air temperature"
+      },
+      "device_uptime": {
+        "name": "Device uptime"
+      },
+      "errors": {
+        "name": "Error count"
+      },
+      "floor_temperature": {
+        "name": "Floor temperature"
+      },
+      "heating_uptime": {
+        "name": "Heating uptime"
+      }
+    }
+  },
   "exceptions": {
     "authentication_failed": {
       "message": "Authentication failed, please reauthenticate."
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==1.0.2"]
+  "requirements": ["aioairzone==1.0.3"]
 }
@@ -29,5 +29,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
+  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
 }
@@ -127,7 +127,6 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
     "binary_sensor",
     "climate",
     "cover",
-    "device_tracker",
    "fan",
     "lawn_mower",
     "light",
@@ -11,13 +11,5 @@
     "see": {
       "service": "mdi:account-eye"
     }
-  },
-  "triggers": {
-    "entered_home": {
-      "trigger": "mdi:account-arrow-left"
-    },
-    "left_home": {
-      "trigger": "mdi:account-arrow-right"
-    }
-  }
+  }
 }
@@ -1,8 +1,4 @@
 {
-  "common": {
-    "trigger_behavior_description": "The behavior of the targeted device trackers to trigger on.",
-    "trigger_behavior_name": "Behavior"
-  },
   "device_automation": {
     "condition_type": {
       "is_home": "{entity_name} is home",
@@ -48,15 +44,6 @@
       }
     }
   },
-  "selector": {
-    "trigger_behavior": {
-      "options": {
-        "any": "Any",
-        "first": "First",
-        "last": "Last"
-      }
-    }
-  },
   "services": {
     "see": {
       "description": "Manually update the records of a seen legacy device tracker in the known_devices.yaml file.",
@@ -93,27 +80,5 @@
       "name": "See"
     }
   },
-  "title": "Device tracker",
-  "triggers": {
-    "entered_home": {
-      "description": "Triggers when one or more device trackers enter home.",
-      "fields": {
-        "behavior": {
-          "description": "[%key:component::device_tracker::common::trigger_behavior_description%]",
-          "name": "[%key:component::device_tracker::common::trigger_behavior_name%]"
-        }
-      },
-      "name": "Entered home"
-    },
-    "left_home": {
-      "description": "Triggers when one or more device trackers leave home.",
-      "fields": {
-        "behavior": {
-          "description": "[%key:component::device_tracker::common::trigger_behavior_description%]",
-          "name": "[%key:component::device_tracker::common::trigger_behavior_name%]"
-        }
-      },
-      "name": "Left home"
-    }
-  }
+  "title": "Device tracker"
 }
@@ -1,21 +0,0 @@
-"""Provides triggers for device_trackers."""
-
-from homeassistant.const import STATE_HOME
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.trigger import (
-    Trigger,
-    make_entity_state_trigger,
-    make_from_entity_state_trigger,
-)
-
-from .const import DOMAIN
-
-TRIGGERS: dict[str, type[Trigger]] = {
-    "entered_home": make_entity_state_trigger(DOMAIN, STATE_HOME),
-    "left_home": make_from_entity_state_trigger(DOMAIN, from_state=STATE_HOME),
-}
-
-
-async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
-    """Return the triggers for device trackers."""
-    return TRIGGERS
@@ -1,18 +0,0 @@
-.trigger_common: &trigger_common
-  target:
-    entity:
-      domain: device_tracker
-  fields:
-    behavior:
-      required: true
-      default: any
-      selector:
-        select:
-          options:
-            - first
-            - last
-            - any
-          translation_key: trigger_behavior
-
-entered_home: *trigger_common
-left_home: *trigger_common
@@ -56,11 +56,11 @@ class EnergyZeroDataUpdateCoordinator(DataUpdateCoordinator[EnergyZeroData]):
         energy_tomorrow = None

         try:
-            energy_today = await self.energyzero.energy_prices(
+            energy_today = await self.energyzero.get_electricity_prices_legacy(
                 start_date=today, end_date=today
             )
             try:
-                gas_today = await self.energyzero.gas_prices(
+                gas_today = await self.energyzero.get_gas_prices_legacy(
                     start_date=today, end_date=today
                 )
             except EnergyZeroNoDataError:
@@ -69,8 +69,10 @@ class EnergyZeroDataUpdateCoordinator(DataUpdateCoordinator[EnergyZeroData]):
         if dt_util.utcnow().hour >= THRESHOLD_HOUR:
             tomorrow = today + timedelta(days=1)
             try:
-                energy_tomorrow = await self.energyzero.energy_prices(
-                    start_date=tomorrow, end_date=tomorrow
+                energy_tomorrow = (
+                    await self.energyzero.get_electricity_prices_legacy(
+                        start_date=tomorrow, end_date=tomorrow
+                    )
                 )
             except EnergyZeroNoDataError:
                 LOGGER.debug("No data for tomorrow for EnergyZero integration")
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/energyzero",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["energyzero==2.1.1"],
+  "requirements": ["energyzero==4.0.1"],
   "single_config_entry": true
 }
@@ -128,13 +128,13 @@ async def __get_prices(
     data: Electricity | Gas

     if price_type == PriceType.GAS:
-        data = await coordinator.energyzero.gas_prices(
+        data = await coordinator.energyzero.get_gas_prices_legacy(
             start_date=start,
             end_date=end,
             vat=vat,
         )
     else:
-        data = await coordinator.energyzero.energy_prices(
+        data = await coordinator.energyzero.get_electricity_prices_legacy(
             start_date=start,
             end_date=end,
             vat=vat,
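The coordinator above always fetches today's prices and only asks for tomorrow's once the clock passes THRESHOLD_HOUR, swallowing the provider's no-data error for that second call. A minimal standalone sketch of that guard, with a stubbed client in place of the real energyzero library and an assumed cut-off hour:

import asyncio
from datetime import date, datetime, timedelta

THRESHOLD_HOUR = 14  # assumed cut-off; the real value lives in the integration's const module


class NoDataError(Exception):
    """Raised by the stub client when no prices are published yet."""


class StubClient:
    """Hypothetical stand-in for the price API client."""

    async def get_electricity_prices(self, start_date: date, end_date: date) -> dict[str, float]:
        if start_date > date.today():
            raise NoDataError  # tomorrow's prices are not always published yet
        return {"avg": 0.21}


async def fetch_prices(client: StubClient, now: datetime) -> tuple[dict | None, dict | None]:
    today = now.date()
    prices_today = await client.get_electricity_prices(start_date=today, end_date=today)

    prices_tomorrow = None
    if now.hour >= THRESHOLD_HOUR:  # only worth asking after the daily publication time
        tomorrow = today + timedelta(days=1)
        try:
            prices_tomorrow = await client.get_electricity_prices(
                start_date=tomorrow, end_date=tomorrow
            )
        except NoDataError:
            pass  # keep tomorrow's slot empty; the coordinator just logs this case
    return prices_today, prices_tomorrow


print(asyncio.run(fetch_prices(StubClient(), datetime.now())))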
@@ -15,12 +15,14 @@ from aioesphomeapi import (
     APIVersion,
     DeviceInfo as EsphomeDeviceInfo,
     EncryptionPlaintextAPIError,
+    ExecuteServiceResponse,
     HomeassistantServiceCall,
     InvalidAuthAPIError,
     InvalidEncryptionKeyAPIError,
     LogLevel,
     ReconnectLogic,
     RequiresEncryptionAPIError,
+    SupportsResponseType,
     UserService,
     UserServiceArgType,
     ZWaveProxyRequest,
@@ -44,7 +46,9 @@ from homeassistant.core import (
     EventStateChangedData,
     HomeAssistant,
     ServiceCall,
+    ServiceResponse,
     State,
+    SupportsResponse,
     callback,
 )
 from homeassistant.exceptions import (
@@ -58,7 +62,7 @@ from homeassistant.helpers import (
     device_registry as dr,
     entity_registry as er,
     issue_registry as ir,
-    json,
+    json as json_helper,
     template,
 )
 from homeassistant.helpers.device_registry import format_mac
@@ -70,6 +74,7 @@ from homeassistant.helpers.issue_registry import (
 )
 from homeassistant.helpers.service import async_set_service_schema
 from homeassistant.helpers.template import Template
+from homeassistant.util.json import json_loads_object

 from .bluetooth import async_connect_scanner
 from .const import (
@@ -91,6 +96,7 @@ from .encryption_key_storage import async_get_encryption_key_storage

 # Import config flow so that it's added to the registry
 from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
+from .enum_mapper import EsphomeEnumMapper

 DEVICE_CONFLICT_ISSUE_FORMAT = "device_conflict-{}"
 UNPACK_UINT32_BE = struct.Struct(">I").unpack_from
@@ -367,7 +373,7 @@ class ESPHomeManager:
             response_dict = {"response": action_response}

             # JSON encode response data for ESPHome
-            response_data = json.json_bytes(response_dict)
+            response_data = json_helper.json_bytes(response_dict)

         except (
             ServiceNotFound,
@@ -1150,13 +1156,52 @@ ARG_TYPE_METADATA = {
 }


-@callback
-def execute_service(
-    entry_data: RuntimeEntryData, service: UserService, call: ServiceCall
-) -> None:
-    """Execute a service on a node."""
+async def execute_service(
+    entry_data: RuntimeEntryData,
+    service: UserService,
+    call: ServiceCall,
+    *,
+    supports_response: SupportsResponseType,
+) -> ServiceResponse:
+    """Execute a service on a node and optionally wait for response."""
+    # Determine if we should wait for a response
+    # NONE: fire and forget
+    # OPTIONAL/ONLY/STATUS: always wait for success/error confirmation
+    wait_for_response = supports_response != SupportsResponseType.NONE
+
+    if not wait_for_response:
+        # Fire and forget - no response expected
+        try:
+            await entry_data.client.execute_service(service, call.data)
+        except APIConnectionError as err:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="action_call_failed",
+                translation_placeholders={
+                    "call_name": service.name,
+                    "device_name": entry_data.name,
+                    "error": str(err),
+                },
+            ) from err
+        else:
+            return None
+
+    # Determine if we need response_data from ESPHome
+    # ONLY: always need response_data
+    # OPTIONAL: only if caller requested it
+    # STATUS: never need response_data (just success/error)
+    need_response_data = supports_response == SupportsResponseType.ONLY or (
+        supports_response == SupportsResponseType.OPTIONAL and call.return_response
+    )
+
     try:
-        entry_data.client.execute_service(service, call.data)
+        response: (
+            ExecuteServiceResponse | None
+        ) = await entry_data.client.execute_service(
+            service,
+            call.data,
+            return_response=need_response_data,
+        )
     except APIConnectionError as err:
         raise HomeAssistantError(
             translation_domain=DOMAIN,
@@ -1167,6 +1212,44 @@ def execute_service(
                 "error": str(err),
             },
         ) from err
+    except TimeoutError as err:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN,
+            translation_key="action_call_timeout",
+            translation_placeholders={
+                "call_name": service.name,
+                "device_name": entry_data.name,
+            },
+        ) from err
+
+    assert response is not None
+
+    if not response.success:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN,
+            translation_key="action_call_failed",
+            translation_placeholders={
+                "call_name": service.name,
+                "device_name": entry_data.name,
+                "error": response.error_message,
+            },
+        )
+
+    # Parse and return response data as JSON if we requested it
+    if need_response_data and response.response_data:
+        try:
+            return json_loads_object(response.response_data)
+        except ValueError as err:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="action_call_failed",
+                translation_placeholders={
+                    "call_name": service.name,
+                    "device_name": entry_data.name,
+                    "error": f"Invalid JSON response: {err}",
+                },
+            ) from err
+    return None


 def build_service_name(device_info: EsphomeDeviceInfo, service: UserService) -> str:
@@ -1174,6 +1257,19 @@ def build_service_name(device_info: EsphomeDeviceInfo, service: UserService) ->
     return f"{device_info.name.replace('-', '_')}_{service.name}"


+# Map ESPHome SupportsResponseType to Home Assistant SupportsResponse
+# STATUS (100) is ESPHome-specific: waits for success/error internally but
+# doesn't return data to HA, so it maps to NONE from HA's perspective
+_RESPONSE_TYPE_MAPPER = EsphomeEnumMapper[SupportsResponseType, SupportsResponse](
+    {
+        SupportsResponseType.NONE: SupportsResponse.NONE,
+        SupportsResponseType.OPTIONAL: SupportsResponse.OPTIONAL,
+        SupportsResponseType.ONLY: SupportsResponse.ONLY,
+        SupportsResponseType.STATUS: SupportsResponse.NONE,
+    }
+)
+
+
 @callback
 def _async_register_service(
     hass: HomeAssistant,
@@ -1205,11 +1301,21 @@ def _async_register_service(
             "selector": metadata.selector,
         }

+    # Get the supports_response from the service, defaulting to NONE
+    esphome_supports_response = service.supports_response or SupportsResponseType.NONE
+    ha_supports_response = _RESPONSE_TYPE_MAPPER.from_esphome(esphome_supports_response)
+
     hass.services.async_register(
         DOMAIN,
         service_name,
-        partial(execute_service, entry_data, service),
+        partial(
+            execute_service,
+            entry_data,
+            service,
+            supports_response=esphome_supports_response,
+        ),
         vol.Schema(schema),
+        supports_response=ha_supports_response,
     )
     async_set_service_schema(
         hass,
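The rework above derives two decisions from the declared response type: whether to wait for the device's success/error confirmation at all, and whether to ask for response data. A standalone sketch of that decision logic and of the STATUS-maps-to-NONE translation, using plain enums rather than the aioesphomeapi and Home Assistant classes:

from enum import Enum


class DeviceResponseType(Enum):  # stands in for ESPHome's SupportsResponseType
    NONE = 0
    OPTIONAL = 1
    ONLY = 2
    STATUS = 100


class HaSupportsResponse(Enum):  # stands in for Home Assistant's SupportsResponse
    NONE = "none"
    OPTIONAL = "optional"
    ONLY = "only"


# STATUS waits for success/error on the wire but never returns data to the caller,
# so from the caller's point of view it behaves like NONE.
TO_HA = {
    DeviceResponseType.NONE: HaSupportsResponse.NONE,
    DeviceResponseType.OPTIONAL: HaSupportsResponse.OPTIONAL,
    DeviceResponseType.ONLY: HaSupportsResponse.ONLY,
    DeviceResponseType.STATUS: HaSupportsResponse.NONE,
}


def call_plan(kind: DeviceResponseType, caller_wants_data: bool) -> tuple[bool, bool]:
    """Return (wait_for_confirmation, request_response_data) for a service call."""
    wait = kind != DeviceResponseType.NONE
    need_data = kind == DeviceResponseType.ONLY or (
        kind == DeviceResponseType.OPTIONAL and caller_wants_data
    )
    return wait, need_data


for kind in DeviceResponseType:
    print(kind.name, TO_HA[kind].name, call_plan(kind, caller_wants_data=True))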
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==42.10.0",
+    "aioesphomeapi==43.0.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
@@ -128,6 +128,9 @@
     "action_call_failed": {
       "message": "Failed to execute the action call {call_name} on {device_name}: {error}"
     },
+    "action_call_timeout": {
+      "message": "Timeout waiting for response from action call {call_name} on {device_name}"
+    },
     "error_communicating_with_device": {
       "message": "Error communicating with the device {device_name}: {error}"
     },
@@ -42,6 +42,11 @@ class FressnapfTrackerDeviceTracker(FressnapfTrackerBaseEntity, TrackerEntity):
         """Return if entity is available."""
         return super().available and self.coordinator.data.position is not None

+    @property
+    def entity_picture(self) -> str | None:
+        """Return the entity picture url."""
+        return self.coordinator.data.icon
+
     @property
     def latitude(self) -> float | None:
         """Return latitude value of the device."""
@@ -26,7 +26,7 @@ from homeassistant.const import (
     CONF_NAME,
 )
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.data_entry_flow import section
+from homeassistant.data_entry_flow import SectionConfig, section
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.selector import LocationSelector, LocationSelectorConfig

@@ -38,7 +38,8 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_API_KEY): str,
         vol.Optional(SECTION_API_KEY_OPTIONS): section(
-            vol.Schema({vol.Optional(CONF_REFERRER): str}), {"collapsed": True}
+            vol.Schema({vol.Optional(CONF_REFERRER): str}),
+            SectionConfig(collapsed=True),
         ),
     }
 )
@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.2.2"]
+  "requirements": ["pylamarzocco==2.2.3"]
 }
@@ -191,6 +191,7 @@ class ProgramPhaseWashingMachine(MieleEnum, missing_to_none=True):
     drying = 280
     disinfecting = 285
     flex_load_active = 11047
+    automatic_start = 11044


 class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
@@ -451,19 +452,19 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
     """Program Id codes for washing machines."""

     no_program = 0, -1
-    cottons = 1
+    cottons = 1, 10001
     minimum_iron = 3
-    delicates = 4
-    woollens = 8
-    silks = 9
+    delicates = 4, 10022
+    woollens = 8, 10040
+    silks = 9, 10042
     starch = 17
-    rinse = 18
-    drain_spin = 21
-    curtains = 22
-    shirts = 23
+    rinse = 18, 10058
+    drain_spin = 21, 10036
+    curtains = 22, 10055
+    shirts = 23, 10038
     denim = 24, 123
-    proofing = 27
-    sportswear = 29
+    proofing = 27, 10057
+    sportswear = 29, 10052
     automatic_plus = 31
     outerwear = 37
     pillows = 39
@@ -472,19 +473,29 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
     rinse_out_lint = 48  # washer-dryer
     dark_garments = 50
     separate_rinse_starch = 52
-    first_wash = 53
+    first_wash = 53, 10053
     cottons_hygiene = 69
     steam_care = 75  # washer-dryer
     freshen_up = 76  # washer-dryer
-    trainers = 77
-    clean_machine = 91
-    down_duvets = 95
-    express_20 = 122
+    trainers = 77, 10056
+    clean_machine = 91, 10067
+    down_duvets = 95, 10050
+    express_20 = 122, 10029
     down_filled_items = 129
     cottons_eco = 133
     quick_power_wash = 146, 10031
     eco_40_60 = 190, 10007
-    normal = 10001
+    bed_linen = 10047
+    easy_care = 10016
+    dark_jeans = 10048
+    outdoor_garments = 10049
+    game_pieces = 10070
+    stuffed_toys = 10069
+    pre_ironing = 10059
+    trainers_refresh = 10066
+    smartmatic = 10068
+    cottonrepair = 10065
+    powerfresh = 10075


 class DishWasherProgramId(MieleEnum, missing_to_none=True):
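The enum members above map several raw program-ID codes onto one name, and `missing_to_none=True` turns unknown codes into a harmless fallback instead of raising. A standalone sketch of that lookup behaviour using a plain Enum with a custom `_missing_` hook; this is only the general idea, not the pymiele MieleEnum implementation:

from enum import Enum


class ProgramId(Enum):
    """Each member owns a tuple of raw codes reported by different device generations."""

    cottons = (1, 10001)
    delicates = (4, 10022)
    eco_40_60 = (190, 10007)
    unknown = ()  # returned for codes no member claims

    @classmethod
    def _missing_(cls, value: object) -> "ProgramId":
        for member in cls:
            if value in member.value:
                return member
        return cls.unknown  # stand-in for the missing_to_none behaviour


print(ProgramId(1).name)      # cottons
print(ProgramId(10001).name)  # cottons (same program, newer code)
print(ProgramId(99999).name)  # unknown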
@@ -9,7 +9,7 @@
   "iot_class": "cloud_push",
   "loggers": ["pymiele"],
   "quality_scale": "platinum",
-  "requirements": ["pymiele==0.6.0"],
+  "requirements": ["pymiele==0.6.1"],
   "single_config_entry": true,
   "zeroconf": ["_mieleathome._tcp.local."]
 }
@@ -411,6 +411,7 @@
       "cook_bacon": "Cook bacon",
       "cool_air": "Cool air",
       "corn_on_the_cob": "Corn on the cob",
+      "cottonrepair": "CottonRepair",
       "cottons": "Cottons",
       "cottons_eco": "Cottons ECO",
       "cottons_hygiene": "Cottons hygiene",
@@ -440,6 +441,7 @@
       "custom_program_8": "Custom program 8",
       "custom_program_9": "Custom program 9",
       "dark_garments": "Dark garments",
+      "dark_jeans": "Dark/jeans",
       "dark_mixed_grain_bread": "Dark mixed grain bread",
       "decrystallise_honey": "Decrystallize honey",
       "defrost": "Defrost",
@@ -457,6 +459,7 @@
       "drop_cookies_2_trays": "Drop cookies (2 trays)",
       "duck": "Duck",
       "dutch_hash": "Dutch hash",
+      "easy_care": "Easy care",
       "eco": "ECO",
       "eco_40_60": "ECO 40-60",
       "eco_fan_heat": "ECO fan heat",
@@ -487,6 +490,7 @@
       "fruit_streusel_cake": "Fruit streusel cake",
       "fruit_tea": "Fruit tea",
       "full_grill": "Full grill",
+      "game_pieces": "Game pieces",
       "gentle": "Gentle",
       "gentle_denim": "Gentle denim",
       "gentle_minimum_iron": "Gentle minimum iron",
@@ -607,6 +611,7 @@
       "oats_cracked": "Oats (cracked)",
       "oats_whole": "Oats (whole)",
       "osso_buco": "Osso buco",
+      "outdoor_garments": "Outdoor garments",
       "outerwear": "Outerwear",
       "oyster_mushroom_diced": "Oyster mushroom (diced)",
       "oyster_mushroom_strips": "Oyster mushroom (strips)",
@@ -713,8 +718,10 @@
       "potatoes_waxy_whole_small": "Potatoes (waxy, whole, small)",
       "poularde_breast": "Poularde breast",
       "poularde_whole": "Poularde (whole)",
+      "power_fresh": "PowerFresh",
       "power_wash": "PowerWash",
       "prawns": "Prawns",
+      "pre_ironing": "Pre-ironing",
       "proofing": "Proofing",
       "prove_15_min": "Prove for 15 min",
       "prove_30_min": "Prove for 30 min",
@@ -807,6 +814,7 @@
       "simiao_rapid_steam_cooking": "Simiao (rapid steam cooking)",
       "simiao_steam_cooking": "Simiao (steam cooking)",
       "small_shrimps": "Small shrimps",
+      "smartmatic": "SmartMatic",
       "smoothing": "Smoothing",
       "snow_pea": "Snow pea",
       "soak": "Soak",
@@ -833,6 +841,7 @@
       "sterilize_crockery": "Sterilize crockery",
       "stollen": "Stollen",
       "stuffed_cabbage": "Stuffed cabbage",
+      "stuffed_toys": "Stuffed toys",
       "sweat_onions": "Sweat onions",
       "swede_cut_into_batons": "Swede (cut into batons)",
       "swede_diced": "Swede (diced)",
@@ -855,6 +864,7 @@
       "top_heat": "Top heat",
       "tortellini_fresh": "Tortellini (fresh)",
       "trainers": "Trainers",
+      "trainers_refresh": "Trainers refresh",
       "treacle_sponge_pudding_one_large": "Treacle sponge pudding (one large)",
       "treacle_sponge_pudding_several_small": "Treacle sponge pudding (several small)",
       "trout": "Trout",
@@ -935,6 +945,7 @@
       "2nd_grinding": "2nd grinding",
       "2nd_pre_brewing": "2nd pre-brewing",
       "anti_crease": "Anti-crease",
+      "automatic_start": "Automatic start",
       "blocked_brushes": "Brushes blocked",
       "blocked_drive_wheels": "Drive wheels blocked",
       "blocked_front_wheel": "Front wheel blocked",
@@ -64,10 +64,11 @@ class PingDataICMPLib(PingData):
             return

         _LOGGER.debug(
-            "async_ping returned: reachable=%s sent=%i received=%s",
+            "async_ping returned: reachable=%s sent=%i received=%s loss=%s",
             data.is_alive,
             data.packets_sent,
             data.packets_received,
+            data.packet_loss * 100,
         )

         self.is_alive = data.is_alive
@@ -80,6 +81,7 @@ class PingDataICMPLib(PingData):
             "max": data.max_rtt,
             "avg": data.avg_rtt,
             "jitter": data.jitter,
+            "loss": data.packet_loss * 100,
         }
homeassistant/components/ping/icons.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
  "entity": {
    "sensor": {
      "loss": {
        "default": "mdi:alert-circle-outline"
      }
    }
  }
}
@@ -10,7 +10,7 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import EntityCategory, UnitOfTime
+from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -82,6 +82,16 @@ SENSORS: tuple[PingSensorEntityDescription, ...] = (
         value_fn=lambda result: result.data.get("jitter"),
         has_fn=lambda result: "jitter" in result.data,
     ),
+    PingSensorEntityDescription(
+        key="loss",
+        translation_key="loss",
+        native_unit_of_measurement=PERCENTAGE,
+        state_class=SensorStateClass.MEASUREMENT,
+        entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda result: result.data.get("loss"),
+        has_fn=lambda result: "loss" in result.data,
+    ),
 )
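The new packet-loss sensor follows the integration's description pattern: `has_fn` decides whether the sensor exists for a given ping result and `value_fn` extracts its value. A standalone sketch of that pattern over a plain result dict; the result shape used here is an assumption, not the integration's real dataclass:

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    unit: str | None
    value_fn: Callable[[dict], float | None]
    has_fn: Callable[[dict], bool]


DESCRIPTIONS = (
    Description(
        key="round_trip_time_avg",
        unit="ms",
        value_fn=lambda data: data.get("avg"),
        has_fn=lambda data: "avg" in data,
    ),
    Description(
        key="loss",
        unit="%",
        value_fn=lambda data: data.get("loss"),
        has_fn=lambda data: "loss" in data,  # only present when the backend reports it
    ),
)


def describe(ping_data: dict) -> dict[str, float | None]:
    """Build the sensor values that would be exposed for one ping result."""
    return {d.key: d.value_fn(ping_data) for d in DESCRIPTIONS if d.has_fn(ping_data)}


# 1 of 5 packets lost: an icmplib-style fraction scaled to a percentage
print(describe({"avg": 12.3, "loss": 1 / 5 * 100}))
print(describe({"avg": 9.8}))  # backend without loss data: no loss sensor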
@@ -22,6 +22,9 @@
       "jitter": {
         "name": "Jitter"
       },
+      "loss": {
+        "name": "Packet loss"
+      },
       "round_trip_time_avg": {
         "name": "Round-trip time average"
       },
@@ -20,7 +20,7 @@ from roborock.devices.device_manager import UserParams, create_device_manager
 from roborock.map.map_parser import MapParserConfig

 from homeassistant.const import CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
-from homeassistant.core import HomeAssistant
+from homeassistant.core import Event, HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -99,10 +99,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
             translation_domain=DOMAIN,
             translation_key="home_data_fail",
         ) from err

+    async def shutdown_roborock(_: Event | None = None) -> None:
+        await asyncio.gather(device_manager.close(), cache.flush())
+
+    entry.async_on_unload(
+        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_roborock)
+    )
+    entry.async_on_unload(shutdown_roborock)

     devices = await device_manager.get_devices()
     _LOGGER.debug("Device manager found %d devices", len(devices))
     for device in devices:
         entry.async_on_unload(device.close)

     coordinators = await asyncio.gather(
         *build_setup_functions(hass, entry, devices, user_data),
@@ -124,25 +131,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
             translation_domain=DOMAIN,
             translation_key="no_coordinators",
         )
-    valid_coordinators = RoborockCoordinators(v1_coords, a01_coords)
-
-    async def on_stop(_: Any) -> None:
-        _LOGGER.debug("Shutting down roborock")
-        await asyncio.gather(
-            *(
-                coordinator.async_shutdown()
-                for coordinator in valid_coordinators.values()
-            ),
-            cache.flush(),
-        )
-
-    entry.async_on_unload(
-        hass.bus.async_listen_once(
-            EVENT_HOMEASSISTANT_STOP,
-            on_stop,
-        )
-    )
-    entry.runtime_data = valid_coordinators
+    entry.runtime_data = RoborockCoordinators(v1_coords, a01_coords)

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
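The refactor above registers one `shutdown_roborock` callback both for config-entry unload and for the `EVENT_HOMEASSISTANT_STOP` bus event, so resources are closed on whichever happens first. A standalone sketch of that double registration with toy bus and entry objects; the class names are illustrative, not Home Assistant's:

import asyncio


class ToyBus:
    """Minimal stand-in for the Home Assistant event bus."""

    def __init__(self) -> None:
        self._listeners = []

    def listen_once(self, callback):
        self._listeners.append(callback)
        return lambda: self._listeners.remove(callback)  # unsubscribe function

    async def fire_stop(self) -> None:
        for callback in list(self._listeners):
            await callback()


class ToyEntry:
    """Minimal stand-in for a config entry's unload bookkeeping."""

    def __init__(self) -> None:
        self._on_unload = []

    def async_on_unload(self, callback) -> None:
        self._on_unload.append(callback)

    async def unload(self) -> None:
        for callback in reversed(self._on_unload):
            result = callback()
            if asyncio.iscoroutine(result):
                await result


async def main() -> None:
    bus, entry = ToyBus(), ToyEntry()

    async def shutdown(_=None) -> None:
        print("closing device manager and flushing cache")

    # The same callback is wired twice: once for the stop event, once for unload.
    # Registering the bus unsubscriber on unload leaves no stale listener behind.
    entry.async_on_unload(bus.listen_once(shutdown))
    entry.async_on_unload(shutdown)

    await entry.unload()   # runs shutdown once and removes the bus listener
    await bus.fire_stop()  # nothing left to call: the listener was unsubscribed


asyncio.run(main())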
@@ -20,7 +20,7 @@
   "loggers": ["roborock"],
   "quality_scale": "silver",
   "requirements": [
-    "python-roborock==3.10.2",
+    "python-roborock==3.10.10",
     "vacuum-map-parser-roborock==0.1.4"
   ]
 }
@@ -65,11 +65,9 @@ rules:
   exception-translations: done
   icon-translations: todo
   reconfiguration-flow: todo
-  repair-issues:
-    status: todo
-    comment: The Cloud vs Local API warning should probably be a repair issue.
+  repair-issues: done
   stale-devices: done
   # Platinum
-  async-dependency: todo
+  async-dependency: done
   inject-websession: done
   strict-typing: done
@@ -223,11 +223,13 @@
           "name": "Status",
           "state": {
             "air_drying_stopping": "Air drying stopping",
+            "attaching_the_mop": "Attaching the mop",
             "charger_disconnected": "Charger disconnected",
             "charging": "[%key:common::state::charging%]",
             "charging_complete": "Charging complete",
             "charging_problem": "Charging problem",
             "cleaning": "Cleaning",
+            "detaching_the_mop": "Detaching the mop",
             "device_offline": "Device offline",
             "docking": "Docking",
             "egg_attack": "Cupid mode",
@@ -31,10 +31,12 @@ _LOGGER = logging.getLogger(__name__)

 STATE_CODE_TO_STATE = {
     RoborockStateCode.starting: VacuumActivity.IDLE,  # "Starting"
+    RoborockStateCode.attaching_the_mop: VacuumActivity.DOCKED,  # "Attaching the mop"
     RoborockStateCode.charger_disconnected: VacuumActivity.IDLE,  # "Charger disconnected"
     RoborockStateCode.idle: VacuumActivity.IDLE,  # "Idle"
     RoborockStateCode.remote_control_active: VacuumActivity.CLEANING,  # "Remote control active"
     RoborockStateCode.cleaning: VacuumActivity.CLEANING,  # "Cleaning"
+    RoborockStateCode.detaching_the_mop: VacuumActivity.DOCKED,  # "Detaching the mop"
     RoborockStateCode.returning_home: VacuumActivity.RETURNING,  # "Returning home"
     RoborockStateCode.manual_mode: VacuumActivity.CLEANING,  # "Manual mode"
     RoborockStateCode.charging: VacuumActivity.DOCKED,  # "Charging"
@@ -170,6 +170,9 @@ async def _async_setup_block_entry(
     device_entry = dev_reg.async_get_device(
         connections={(CONNECTION_NETWORK_MAC, dr.format_mac(entry.unique_id))},
     )
+    # https://github.com/home-assistant/core/pull/48076
+    if device_entry and entry.entry_id not in device_entry.config_entries:
+        device_entry = None

     sleep_period = entry.data.get(CONF_SLEEP_PERIOD)
     runtime_data = entry.runtime_data
@@ -280,6 +283,9 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
     device_entry = dev_reg.async_get_device(
         connections={(CONNECTION_NETWORK_MAC, dr.format_mac(entry.unique_id))},
     )
+    # https://github.com/home-assistant/core/pull/48076
+    if device_entry and entry.entry_id not in device_entry.config_entries:
+        device_entry = None

     sleep_period = entry.data.get(CONF_SLEEP_PERIOD)
     runtime_data = entry.runtime_data
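The config-flow changes in the next section keep a single BLE RPC connection alive across flow steps, verifying it with a status call and reconnecting only when it has dropped. A minimal standalone sketch of that reuse-or-reconnect pattern follows; the client class and its methods are invented placeholders, not aioshelly's API.

import asyncio


class LinkError(Exception):
    """Stand-in for the library's connection/RPC errors."""


class FakeBleRpcClient:
    """Hypothetical BLE RPC client used only to illustrate the pattern."""

    def __init__(self, address: str) -> None:
        self.address = address
        self.connected = False

    async def initialize(self) -> None:
        self.connected = True

    async def update_status(self) -> None:
        if not self.connected:
            raise LinkError("link dropped")

    async def shutdown(self) -> None:
        self.connected = False


class FlowState:
    """Keeps one client for the whole flow instead of reconnecting every step."""

    def __init__(self, address: str) -> None:
        self.address = address
        self._client: FakeBleRpcClient | None = None

    async def ensure_connected(self) -> FakeBleRpcClient:
        if self._client is not None and self._client.connected:
            try:
                await self._client.update_status()  # cheap liveness check
            except LinkError:
                await self.disconnect()  # stale handle: drop it and reconnect below
            else:
                return self._client
        client = FakeBleRpcClient(self.address)
        await client.initialize()
        self._client = client
        return client

    async def disconnect(self) -> None:
        if self._client is not None:
            await self._client.shutdown()
            self._client = None


async def main() -> None:
    flow = FlowState("AA:BB:CC:DD:EE:FF")
    first = await flow.ensure_connected()   # connects
    again = await flow.ensure_connected()   # reuses the live connection
    print(first is again)                   # True
    first.connected = False                 # simulate a dropped link
    fresh = await flow.ensure_connected()   # detects the drop and reconnects
    print(fresh is first)                   # False
    await flow.disconnect()


asyncio.run(main())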
@@ -13,11 +13,6 @@ from aioshelly.ble.manufacturer_data import (
     has_rpc_over_ble,
     parse_shelly_manufacturer_data,
 )
-from aioshelly.ble.provisioning import (
-    async_provision_wifi,
-    async_scan_wifi_networks,
-    ble_rpc_device,
-)
 from aioshelly.block_device import BlockDevice
 from aioshelly.common import ConnectionOptions, get_info
 from aioshelly.const import BLOCK_GENERATIONS, DEFAULT_HTTP_PORT, RPC_GENERATIONS
@@ -30,6 +25,7 @@ from aioshelly.exceptions import (
     RpcCallError,
 )
 from aioshelly.rpc_device import RpcDevice
+from aioshelly.rpc_device.models import ShellyWiFiNetwork
 from aioshelly.zeroconf import async_discover_devices, async_lookup_device_by_name
 from bleak.backends.device import BLEDevice
 import voluptuous as vol
@@ -118,31 +114,6 @@ MANUAL_ENTRY_STRING = "manual"
 DISCOVERY_SOURCES = {SOURCE_BLUETOOTH, SOURCE_ZEROCONF}


-async def async_get_ip_from_ble(ble_device: BLEDevice) -> str | None:
-    """Get device IP address via BLE after WiFi provisioning.
-
-    Args:
-        ble_device: BLE device to query
-
-    Returns:
-        IP address string if available, None otherwise
-
-    """
-    try:
-        async with ble_rpc_device(ble_device) as device:
-            await device.update_status()
-            if (
-                (wifi := device.status.get("wifi"))
-                and isinstance(wifi, dict)
-                and (ip := wifi.get("sta_ip"))
-            ):
-                return cast(str, ip)
-            return None
-    except (DeviceConnectionError, RpcCallError) as err:
-        LOGGER.debug("Failed to get IP via BLE: %s", err)
-        return None
-
-
 # BLE provisioning flow steps that are in the finishing state
 # Used to determine if a BLE flow should be aborted when zeroconf discovers the device
 BLUETOOTH_FINISHING_STEPS = {"do_provision", "provision_done"}
@@ -244,13 +215,14 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
     device_info: dict[str, Any] = {}
     ble_device: BLEDevice | None = None
     device_name: str = ""
-    wifi_networks: list[dict[str, Any]] = []
+    wifi_networks: list[ShellyWiFiNetwork] = []
     selected_ssid: str = ""
     _provision_task: asyncio.Task | None = None
     _provision_result: ConfigFlowResult | None = None
     disable_ap_after_provision: bool = True
     disable_ble_rpc_after_provision: bool = True
     _discovered_devices: dict[str, DiscoveredDeviceZeroconf | DiscoveredDeviceBluetooth]
+    _ble_rpc_device: RpcDevice | None = None

     @staticmethod
     def _get_name_from_mac_and_ble_model(
@@ -299,6 +271,81 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):

         return mac, device_name

+    async def _async_ensure_ble_connected(self) -> RpcDevice:
+        """Ensure BLE RPC device is connected, reconnecting if needed.
+
+        Maintains a persistent BLE connection across config flow steps to avoid
+        the overhead of reconnecting between WiFi scan and provisioning steps.
+
+        Returns:
+            Connected RpcDevice instance
+
+        Raises:
+            DeviceConnectionError: If connection fails
+            RpcCallError: If ping fails after connection
+
+        """
+        if TYPE_CHECKING:
+            assert self.ble_device is not None
+
+        if self._ble_rpc_device is not None and self._ble_rpc_device.connected:
+            # Ping to verify connection is still alive
+            try:
+                await self._ble_rpc_device.update_status()
+            except (DeviceConnectionError, RpcCallError):
+                # Connection dropped, need to reconnect
+                LOGGER.debug("BLE connection lost, reconnecting")
+                await self._async_disconnect_ble()
+            else:
+                return self._ble_rpc_device
+
+        # Create new connection
+        LOGGER.debug("Creating new BLE RPC connection to %s", self.ble_device.address)
+        options = ConnectionOptions(ble_device=self.ble_device)
+        device = await RpcDevice.create(
+            aiohttp_session=None, ws_context=None, ip_or_options=options
+        )
+        try:
+            await device.initialize()
+        except (DeviceConnectionError, RpcCallError):
+            await device.shutdown()
+            raise
+        self._ble_rpc_device = device
+        return self._ble_rpc_device
+
+    async def _async_disconnect_ble(self) -> None:
+        """Disconnect and cleanup BLE RPC device."""
+        if self._ble_rpc_device is not None:
+            try:
+                await self._ble_rpc_device.shutdown()
+            except Exception:  # noqa: BLE001
+                LOGGER.debug("Error during BLE shutdown", exc_info=True)
+            finally:
+                self._ble_rpc_device = None
+
+    async def _async_get_ip_from_ble(self) -> str | None:
+        """Get device IP address via BLE after WiFi provisioning.
+
+        Uses the persistent BLE connection to get the device's sta_ip from status.
+
+        Returns:
+            IP address string if available, None otherwise
+
+        """
+        try:
+            device = await self._async_ensure_ble_connected()
+        except (DeviceConnectionError, RpcCallError) as err:
+            LOGGER.debug("Failed to get IP via BLE: %s", err)
+            return None
+
+        if (
+            (wifi := device.status.get("wifi"))
+            and isinstance(wifi, dict)
+            and (ip := wifi.get("sta_ip"))
+        ):
+            return cast(str, ip)
+        return None
+
     async def _async_discover_zeroconf_devices(
         self,
     ) -> dict[str, DiscoveredDeviceZeroconf]:
@@ -736,20 +783,21 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
             password = user_input[CONF_PASSWORD]
             return await self.async_step_do_provision({"password": password})

-        # Scan for WiFi networks via BLE
-        if TYPE_CHECKING:
-            assert self.ble_device is not None
+        # Scan for WiFi networks via BLE using persistent connection
         try:
-            self.wifi_networks = await async_scan_wifi_networks(self.ble_device)
+            device = await self._async_ensure_ble_connected()
+            self.wifi_networks = await device.wifi_scan()
         except (DeviceConnectionError, RpcCallError) as err:
            LOGGER.debug("Failed to scan WiFi networks via BLE: %s", err)
             # "Writing is not permitted" error means device rejects BLE writes
             # and BLE provisioning is disabled - user must use Shelly app
             if "not permitted" in str(err):
+                await self._async_disconnect_ble()
                 return self.async_abort(reason="ble_not_permitted")
             return await self.async_step_wifi_scan_failed()
         except Exception:  # noqa: BLE001
             LOGGER.exception("Unexpected exception during WiFi scan")
+            await self._async_disconnect_ble()
             return self.async_abort(reason="unknown")

         # Sort by RSSI (strongest signal first - higher/less negative values first)
@@ -870,17 +918,21 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):

         Returns the flow result to be stored in self._provision_result, or None if failed.
         """
-        # Provision WiFi via BLE
-        if TYPE_CHECKING:
-            assert self.ble_device is not None
+        # Provision WiFi via BLE using persistent connection
         try:
-            await async_provision_wifi(self.ble_device, self.selected_ssid, password)
+            device = await self._async_ensure_ble_connected()
+            await device.wifi_setconfig(
+                sta_ssid=self.selected_ssid,
+                sta_password=password,
+                sta_enable=True,
+            )
         except (DeviceConnectionError, RpcCallError) as err:
             LOGGER.debug("Failed to provision WiFi via BLE: %s", err)
             # BLE connection/communication failed - allow retry from network selection
             return None
         except Exception:  # noqa: BLE001
             LOGGER.exception("Unexpected exception during WiFi provisioning")
+            await self._async_disconnect_ble()
             return self.async_abort(reason="unknown")

         LOGGER.debug(
@@ -918,7 +970,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
             LOGGER.debug(
                 "Active lookup failed, trying to get IP address via BLE as fallback"
             )
-            if ip := await async_get_ip_from_ble(self.ble_device):
+            if ip := await self._async_get_ip_from_ble():
                 LOGGER.debug("Got IP %s from BLE, using it", ip)
                 state.host = ip
                 state.port = DEFAULT_HTTP_PORT
@@ -995,12 +1047,17 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
         if TYPE_CHECKING:
             assert mac is not None

-        async with self._async_provision_context(mac) as state:
-            self._provision_result = (
-                await self._async_provision_wifi_and_wait_for_zeroconf(
-                    mac, password, state
+        try:
+            async with self._async_provision_context(mac) as state:
+                self._provision_result = (
+                    await self._async_provision_wifi_and_wait_for_zeroconf(
+                        mac, password, state
+                    )
                 )
-            )
+        finally:
+            # Always disconnect BLE after provisioning attempt completes
+            # We either succeeded (and will use IP now) or failed (and user will retry)
+            await self._async_disconnect_ble()

     async def async_step_do_provision(
         self, user_input: dict[str, Any] | None = None
@@ -1219,6 +1276,17 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
         """Get info from shelly device."""
         return await get_info(async_get_clientsession(self.hass), host, port=port)

+    @callback
+    def async_remove(self) -> None:
+        """Handle flow removal - cleanup BLE connection."""
+        super().async_remove()
+        if self._ble_rpc_device is not None:
+            # Schedule cleanup as background task since async_remove is sync
+            self.hass.async_create_background_task(
+                self._async_disconnect_ble(),
+                name="shelly_config_flow_ble_cleanup",
+            )
+
     @staticmethod
     @callback
     def async_get_options_flow(config_entry: ShellyConfigEntry) -> OptionsFlowHandler:

@@ -17,7 +17,7 @@
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
   "quality_scale": "platinum",
-  "requirements": ["aioshelly==13.22.0"],
+  "requirements": ["aioshelly==13.23.0"],
   "zeroconf": [
     {
       "name": "shelly*",

@@ -438,6 +438,8 @@ def get_rpc_sub_device_name(
         return f"{device.name} Energy Meter {component_id}"
     if component == "em" and emeter_phase is not None:
         return f"{device.name} Phase {emeter_phase}"
+    if component == "switch":
+        return f"{device.name} Output {component_id}"

     return f"{device.name} {component.title()} {component_id}"

@@ -9,12 +9,15 @@ from tuya_sharing import CustomerDevice

 from homeassistant.util.json import json_loads

-from .const import DPType
 from .type_information import (
+    BitmapTypeInformation,
+    BooleanTypeInformation,
     EnumTypeInformation,
     IntegerTypeInformation,
+    JsonTypeInformation,
+    RawTypeInformation,
+    StringTypeInformation,
     TypeInformation,
-    find_dpcode,
 )
@@ -79,7 +82,7 @@ class DPCodeWrapper(DeviceWrapper):
 class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
     """Base DPCode wrapper with Type Information."""

-    DPTYPE: DPType
+    _DPTYPE: type[T]
     type_information: T

     def __init__(self, dpcode: str, type_information: T) -> None:
@@ -102,8 +105,8 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
         prefer_function: bool = False,
     ) -> Self | None:
         """Find and return a DPCodeTypeInformationWrapper for the given DP codes."""
-        if type_information := find_dpcode(  # type: ignore[call-overload]
-            device, dpcodes, dptype=cls.DPTYPE, prefer_function=prefer_function
+        if type_information := cls._DPTYPE.find_dpcode(
+            device, dpcodes, prefer_function=prefer_function
         ):
             return cls(
                 dpcode=type_information.dpcode, type_information=type_information
@@ -111,10 +114,10 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
         return None


-class DPCodeBase64Wrapper(DPCodeTypeInformationWrapper[TypeInformation]):
+class DPCodeBase64Wrapper(DPCodeTypeInformationWrapper[RawTypeInformation]):
     """Wrapper to extract information from a RAW/binary value."""

-    DPTYPE = DPType.RAW
+    _DPTYPE = RawTypeInformation

     def read_bytes(self, device: CustomerDevice) -> bytes | None:
         """Read the device value for the dpcode."""
@@ -125,13 +128,13 @@ class DPCodeBase64Wrapper(DPCodeTypeInformationWrapper[TypeInformation]):
         return decoded


-class DPCodeBooleanWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
+class DPCodeBooleanWrapper(DPCodeTypeInformationWrapper[BooleanTypeInformation]):
     """Simple wrapper for boolean values.

     Supports True/False only.
     """

-    DPTYPE = DPType.BOOLEAN
+    _DPTYPE = BooleanTypeInformation

     def _convert_value_to_raw_value(
         self, device: CustomerDevice, value: Any
@@ -144,10 +147,10 @@ class DPCodeBooleanWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
         raise ValueError(f"Invalid boolean value `{value}`")


-class DPCodeJsonWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
+class DPCodeJsonWrapper(DPCodeTypeInformationWrapper[JsonTypeInformation]):
     """Wrapper to extract information from a JSON value."""

-    DPTYPE = DPType.JSON
+    _DPTYPE = JsonTypeInformation

     def read_json(self, device: CustomerDevice) -> Any | None:
         """Read the device value for the dpcode."""
@@ -159,7 +162,7 @@ class DPCodeJsonWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
 class DPCodeEnumWrapper(DPCodeTypeInformationWrapper[EnumTypeInformation]):
     """Simple wrapper for EnumTypeInformation values."""

-    DPTYPE = DPType.ENUM
+    _DPTYPE = EnumTypeInformation

     def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
         """Convert a Home Assistant value back to a raw device value."""
@@ -175,7 +178,7 @@ class DPCodeEnumWrapper(DPCodeTypeInformationWrapper[EnumTypeInformation]):
 class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeInformation]):
     """Simple wrapper for IntegerTypeInformation values."""

-    DPTYPE = DPType.INTEGER
+    _DPTYPE = IntegerTypeInformation

     def __init__(self, dpcode: str, type_information: IntegerTypeInformation) -> None:
         """Init DPCodeIntegerWrapper."""
@@ -195,10 +198,10 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeInformation])
         )


-class DPCodeStringWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
+class DPCodeStringWrapper(DPCodeTypeInformationWrapper[StringTypeInformation]):
     """Wrapper to extract information from a STRING value."""

-    DPTYPE = DPType.STRING
+    _DPTYPE = StringTypeInformation


 class DPCodeBitmapBitWrapper(DPCodeWrapper):
@@ -225,7 +228,7 @@ class DPCodeBitmapBitWrapper(DPCodeWrapper):
     ) -> Self | None:
         """Find and return a DPCodeBitmapBitWrapper for the given DP codes."""
         if (
-            type_information := find_dpcode(device, dpcodes, dptype=DPType.BITMAP)
+            type_information := BitmapTypeInformation.find_dpcode(device, dpcodes)
         ) and bitmap_key in type_information.label:
             return cls(
                 type_information.dpcode, type_information.label.index(bitmap_key)

@@ -36,7 +36,6 @@ from .const import (
     TUYA_DISCOVERY_NEW,
     DeviceCategory,
     DPCode,
-    DPType,
 )
 from .entity import TuyaEntity
 from .models import (
@@ -54,7 +53,7 @@ from .type_information import EnumTypeInformation
 class _WindDirectionWrapper(DPCodeTypeInformationWrapper[EnumTypeInformation]):
     """Custom DPCode Wrapper for converting enum to wind direction."""

-    DPTYPE = DPType.ENUM
+    _DPTYPE = EnumTypeInformation

     _WIND_DIRECTIONS = {
         "north": 0.0,

@@ -3,7 +3,7 @@
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import Any, Literal, Self, cast, overload
+from typing import Any, ClassVar, Self, cast

 from tuya_sharing import CustomerDevice

@@ -38,6 +38,7 @@ class TypeInformation[T]:
     As provided by the SDK, from `device.function` / `device.status_range`.
     """

+    _DPTYPE: ClassVar[DPType]
     dpcode: str
     type_data: str | None = None

@@ -52,19 +53,57 @@ class TypeInformation[T]:
         return raw_value

     @classmethod
-    def from_json(cls, dpcode: str, type_data: str) -> Self | None:
+    def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
         """Load JSON string and return a TypeInformation object."""
         return cls(dpcode=dpcode, type_data=type_data)

+    @classmethod
+    def find_dpcode(
+        cls,
+        device: CustomerDevice,
+        dpcodes: str | tuple[str, ...] | None,
+        *,
+        prefer_function: bool = False,
+    ) -> Self | None:
+        """Find type information for a matching DP code available for this device."""
+        if dpcodes is None:
+            return None
+
+        if not isinstance(dpcodes, tuple):
+            dpcodes = (dpcodes,)
+
+        lookup_tuple = (
+            (device.function, device.status_range)
+            if prefer_function
+            else (device.status_range, device.function)
+        )
+
+        for dpcode in dpcodes:
+            for device_specs in lookup_tuple:
+                if (
+                    (current_definition := device_specs.get(dpcode))
+                    and parse_dptype(current_definition.type) is cls._DPTYPE
+                    and (
+                        type_information := cls._from_json(
+                            dpcode=dpcode, type_data=current_definition.values
+                        )
+                    )
+                ):
+                    return type_information
+
+        return None
+

 @dataclass(kw_only=True)
 class BitmapTypeInformation(TypeInformation[int]):
     """Bitmap type information."""

+    _DPTYPE = DPType.BITMAP
+
     label: list[str]

     @classmethod
-    def from_json(cls, dpcode: str, type_data: str) -> Self | None:
+    def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
         """Load JSON string and return a BitmapTypeInformation object."""
         if not (parsed := json_loads_object(type_data)):
             return None
@@ -79,6 +118,8 @@ class BitmapTypeInformation(TypeInformation[int]):
 class BooleanTypeInformation(TypeInformation[bool]):
     """Boolean type information."""

+    _DPTYPE = DPType.BOOLEAN
+
     def process_raw_value(
         self, raw_value: Any | None, device: CustomerDevice
     ) -> bool | None:
@@ -107,6 +148,8 @@ class BooleanTypeInformation(TypeInformation[bool]):
 class EnumTypeInformation(TypeInformation[str]):
     """Enum type information."""

+    _DPTYPE = DPType.ENUM
+
     range: list[str]

     def process_raw_value(
@@ -133,7 +176,7 @@ class EnumTypeInformation(TypeInformation[str]):
         return raw_value

     @classmethod
-    def from_json(cls, dpcode: str, type_data: str) -> Self | None:
+    def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
         """Load JSON string and return an EnumTypeInformation object."""
         if not (parsed := json_loads_object(type_data)):
             return None
@@ -148,6 +191,8 @@ class EnumTypeInformation(TypeInformation[str]):
 class IntegerTypeInformation(TypeInformation[float]):
     """Integer type information."""

+    _DPTYPE = DPType.INTEGER
+
     min: int
     max: int
     scale: int
@@ -223,7 +268,7 @@ class IntegerTypeInformation(TypeInformation[float]):
         return raw_value / (10**self.scale)

     @classmethod
-    def from_json(cls, dpcode: str, type_data: str) -> Self | None:
+    def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
         """Load JSON string and return an IntegerTypeInformation object."""
         if not (parsed := cast(dict[str, Any] | None, json_loads_object(type_data))):
             return None
@@ -239,101 +284,22 @@ class IntegerTypeInformation(TypeInformation[float]):
         )


-_TYPE_INFORMATION_MAPPINGS: dict[DPType, type[TypeInformation]] = {
-    DPType.BITMAP: BitmapTypeInformation,
-    DPType.BOOLEAN: BooleanTypeInformation,
-    DPType.ENUM: EnumTypeInformation,
-    DPType.INTEGER: IntegerTypeInformation,
-    DPType.JSON: TypeInformation,
-    DPType.RAW: TypeInformation,
-    DPType.STRING: TypeInformation,
-}
+@dataclass(kw_only=True)
+class JsonTypeInformation(TypeInformation[Any]):
+    """Json type information."""
+
+    _DPTYPE = DPType.JSON


-@overload
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: Literal[DPType.BITMAP],
-) -> BitmapTypeInformation | None: ...
+@dataclass(kw_only=True)
+class RawTypeInformation(TypeInformation[Any]):
+    """Raw type information."""
+
+    _DPTYPE = DPType.RAW


-@overload
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: Literal[DPType.BOOLEAN],
-) -> BooleanTypeInformation | None: ...
+@dataclass(kw_only=True)
+class StringTypeInformation(TypeInformation[str]):
+    """String type information."""


-@overload
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: Literal[DPType.ENUM],
-) -> EnumTypeInformation | None: ...
-
-
-@overload
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: Literal[DPType.INTEGER],
-) -> IntegerTypeInformation | None: ...
-
-
-@overload
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: Literal[DPType.JSON, DPType.RAW],
-) -> TypeInformation | None: ...
-
-
-def find_dpcode(
-    device: CustomerDevice,
-    dpcodes: str | tuple[str, ...] | None,
-    *,
-    prefer_function: bool = False,
-    dptype: DPType,
-) -> TypeInformation | None:
-    """Find type information for a matching DP code available for this device."""
-    if not (type_information_cls := _TYPE_INFORMATION_MAPPINGS.get(dptype)):
-        raise NotImplementedError(f"find_dpcode not supported for {dptype}")
-
-    if dpcodes is None:
-        return None
-
-    if not isinstance(dpcodes, tuple):
-        dpcodes = (dpcodes,)
-
-    lookup_tuple = (
|
||||
(device.function, device.status_range)
|
||||
if prefer_function
|
||||
else (device.status_range, device.function)
|
||||
)
|
||||
|
||||
for dpcode in dpcodes:
|
||||
for device_specs in lookup_tuple:
|
||||
if (
|
||||
(current_definition := device_specs.get(dpcode))
|
||||
and parse_dptype(current_definition.type) is dptype
|
||||
and (
|
||||
type_information := type_information_cls.from_json(
|
||||
dpcode=dpcode, type_data=current_definition.values
|
||||
)
|
||||
)
|
||||
):
|
||||
return type_information
|
||||
|
||||
return None
|
||||
_DPTYPE = DPType.STRING
|
||||
|
||||
@@ -19,6 +19,7 @@ from homeassistant.components.light import (
    LightEntityDescription,
    LightEntityFeature,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.color import rgb_hex_to_rgb_list
@@ -117,6 +118,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiLightEntityDescription, ...] = (
    UnifiLightEntityDescription[Devices, Device](
        key="LED control",
        translation_key="led_control",
        entity_category=EntityCategory.CONFIG,
        allowed_fn=lambda hub, obj_id: True,
        api_handler_fn=lambda api: api.devices,
        available_fn=async_device_available_fn,

@@ -158,6 +158,22 @@ async def _async_setup_entry(
) -> None:
    await async_migrate_data(hass, entry, data_service.api, bootstrap)
    data_service.async_setup()

    # Create the NVR device before loading platforms
    # This ensures via_device references work for all device entities
    nvr = bootstrap.nvr
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(dr.CONNECTION_NETWORK_MAC, nvr.mac)},
        identifiers={(DOMAIN, nvr.mac)},
        manufacturer="Ubiquiti",
        name=nvr.display_name,
        model=nvr.type,
        sw_version=str(nvr.version),
        configuration_url=nvr.api.base_url,
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    hass.http.register_view(ThumbnailProxyView(hass))
    hass.http.register_view(SnapshotProxyView(hass))

@@ -31,6 +31,7 @@ from homeassistant.const import (
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_create_clientsession,
|
||||
async_get_clientsession,
|
||||
@@ -56,15 +57,113 @@ from .const import (
|
||||
)
|
||||
from .data import UFPConfigEntry, async_last_update_was_successful
|
||||
from .discovery import async_start_discovery
|
||||
from .utils import _async_resolve, _async_short_mac, _async_unifi_mac_from_hass
|
||||
from .utils import (
|
||||
_async_resolve,
|
||||
_async_short_mac,
|
||||
_async_unifi_mac_from_hass,
|
||||
async_create_api_client,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _filter_empty_credentials(user_input: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Filter out empty credential fields to preserve existing values."""
|
||||
return {k: v for k, v in user_input.items() if v not in (None, "")}
|
||||
|
||||
|
||||
def _normalize_port(data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Ensure port is stored as int (NumberSelector returns float)."""
|
||||
return {**data, CONF_PORT: int(data.get(CONF_PORT, DEFAULT_PORT))}
|
||||
|
||||
|
||||
def _build_data_without_credentials(entry_data: Mapping[str, Any]) -> dict[str, Any]:
|
||||
"""Build form data from existing config entry, excluding sensitive credentials."""
|
||||
return {
|
||||
CONF_HOST: entry_data[CONF_HOST],
|
||||
CONF_PORT: entry_data[CONF_PORT],
|
||||
CONF_VERIFY_SSL: entry_data[CONF_VERIFY_SSL],
|
||||
CONF_USERNAME: entry_data[CONF_USERNAME],
|
||||
}
|
||||
|
||||
|
||||
async def _async_clear_session_if_credentials_changed(
|
||||
hass: HomeAssistant,
|
||||
entry: UFPConfigEntry,
|
||||
new_data: Mapping[str, Any],
|
||||
) -> None:
|
||||
"""Clear stored session if credentials have changed to force fresh authentication."""
|
||||
existing_data = entry.data
|
||||
if existing_data.get(CONF_USERNAME) != new_data.get(
|
||||
CONF_USERNAME
|
||||
) or existing_data.get(CONF_PASSWORD) != new_data.get(CONF_PASSWORD):
|
||||
_LOGGER.debug("Credentials changed, clearing stored session")
|
||||
protect = async_create_api_client(hass, entry)
|
||||
try:
|
||||
await protect.clear_session()
|
||||
except Exception as ex: # noqa: BLE001
|
||||
_LOGGER.debug("Failed to clear session, continuing anyway: %s", ex)
|
||||
|
||||
|
||||
ENTRY_FAILURE_STATES = (
|
||||
ConfigEntryState.SETUP_ERROR,
|
||||
ConfigEntryState.SETUP_RETRY,
|
||||
)
|
||||
|
||||
# Selectors for config flow form fields
|
||||
_TEXT_SELECTOR = selector.TextSelector()
|
||||
_PASSWORD_SELECTOR = selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
)
|
||||
_PORT_SELECTOR = selector.NumberSelector(
|
||||
selector.NumberSelectorConfig(
|
||||
mode=selector.NumberSelectorMode.BOX, min=1, max=65535
|
||||
)
|
||||
)
|
||||
_BOOL_SELECTOR = selector.BooleanSelector()
|
||||
|
||||
|
||||
def _build_schema(
|
||||
*,
|
||||
include_host: bool = True,
|
||||
include_connection: bool = True,
|
||||
credentials_optional: bool = False,
|
||||
) -> vol.Schema:
|
||||
"""Build a config flow schema.
|
||||
|
||||
Args:
|
||||
include_host: Include host field (False when host comes from discovery).
|
||||
include_connection: Include port/verify_ssl fields.
|
||||
credentials_optional: Credentials optional (True to keep existing values).
|
||||
|
||||
"""
|
||||
req, opt = vol.Required, vol.Optional
|
||||
cred_key = opt if credentials_optional else req
|
||||
|
||||
schema: dict[vol.Marker, selector.Selector] = {}
|
||||
if include_host:
|
||||
schema[req(CONF_HOST)] = _TEXT_SELECTOR
|
||||
if include_connection:
|
||||
schema[req(CONF_PORT, default=DEFAULT_PORT)] = _PORT_SELECTOR
|
||||
schema[req(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL)] = _BOOL_SELECTOR
|
||||
schema[req(CONF_USERNAME)] = _TEXT_SELECTOR
|
||||
schema[cred_key(CONF_PASSWORD)] = _PASSWORD_SELECTOR
|
||||
schema[cred_key(CONF_API_KEY)] = _PASSWORD_SELECTOR
|
||||
return vol.Schema(schema)
|
||||
|
||||
|
||||
# Schemas for different flow contexts
|
||||
# User flow: all fields required
|
||||
CONFIG_SCHEMA = _build_schema()
|
||||
# Reconfigure flow: keep existing credentials if not provided
|
||||
RECONFIGURE_SCHEMA = _build_schema(credentials_optional=True)
|
||||
# Discovery flow: host comes from discovery, user sets port/ssl
|
||||
DISCOVERY_SCHEMA = _build_schema(include_host=False)
|
||||
# Reauth flow: only credentials, connection settings preserved
|
||||
REAUTH_SCHEMA = _build_schema(
|
||||
include_host=False, include_connection=False, credentials_optional=True
|
||||
)
|
||||
|
||||
|
||||
async def async_local_user_documentation_url(hass: HomeAssistant) -> str:
|
||||
"""Get the documentation url for creating a local user."""
|
||||
@@ -178,19 +277,40 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Confirm discovery."""
|
||||
errors: dict[str, str] = {}
|
||||
discovery_info = self._discovered_device
|
||||
|
||||
form_data = {
|
||||
CONF_HOST: discovery_info["direct_connect_domain"]
|
||||
or discovery_info["source_ip"],
|
||||
CONF_PORT: DEFAULT_PORT,
|
||||
CONF_VERIFY_SSL: bool(discovery_info["direct_connect_domain"]),
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
}
|
||||
|
||||
if user_input is not None:
|
||||
user_input[CONF_PORT] = DEFAULT_PORT
|
||||
# Merge user input with discovery info
|
||||
merged_input = {**form_data, **user_input}
|
||||
nvr_data = None
|
||||
if discovery_info["direct_connect_domain"]:
|
||||
user_input[CONF_HOST] = discovery_info["direct_connect_domain"]
|
||||
user_input[CONF_VERIFY_SSL] = True
|
||||
nvr_data, errors = await self._async_get_nvr_data(user_input)
|
||||
merged_input[CONF_HOST] = discovery_info["direct_connect_domain"]
|
||||
merged_input[CONF_VERIFY_SSL] = True
|
||||
nvr_data, errors = await self._async_get_nvr_data(merged_input)
|
||||
if not nvr_data or errors:
|
||||
user_input[CONF_HOST] = discovery_info["source_ip"]
|
||||
user_input[CONF_VERIFY_SSL] = False
|
||||
nvr_data, errors = await self._async_get_nvr_data(user_input)
|
||||
merged_input[CONF_HOST] = discovery_info["source_ip"]
|
||||
merged_input[CONF_VERIFY_SSL] = False
|
||||
nvr_data, errors = await self._async_get_nvr_data(merged_input)
|
||||
if nvr_data and not errors:
|
||||
return self._async_create_entry(nvr_data.display_name, user_input)
|
||||
return self._async_create_entry(nvr_data.display_name, merged_input)
|
||||
# Preserve user input for form re-display, but keep discovery info
|
||||
form_data = {
|
||||
CONF_HOST: merged_input[CONF_HOST],
|
||||
CONF_PORT: merged_input[CONF_PORT],
|
||||
CONF_VERIFY_SSL: merged_input[CONF_VERIFY_SSL],
|
||||
CONF_USERNAME: user_input.get(CONF_USERNAME, ""),
|
||||
CONF_PASSWORD: user_input.get(CONF_PASSWORD, ""),
|
||||
}
|
||||
if CONF_API_KEY in user_input:
|
||||
form_data[CONF_API_KEY] = user_input[CONF_API_KEY]
|
||||
|
||||
placeholders = {
|
||||
"name": discovery_info["hostname"]
|
||||
@@ -199,7 +319,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"ip_address": discovery_info["source_ip"],
|
||||
}
|
||||
self.context["title_placeholders"] = placeholders
|
||||
user_input = user_input or {}
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
description_placeholders={
|
||||
@@ -208,14 +327,8 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.hass
|
||||
),
|
||||
},
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_USERNAME, default=user_input.get(CONF_USERNAME)
|
||||
): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
DISCOVERY_SCHEMA, form_data
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
@@ -232,7 +345,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
def _async_create_entry(self, title: str, data: dict[str, Any]) -> ConfigFlowResult:
|
||||
return self.async_create_entry(
|
||||
title=title,
|
||||
data={**data, CONF_ID: title},
|
||||
data={**_normalize_port(data), CONF_ID: title},
|
||||
options={
|
||||
CONF_DISABLE_RTSP: False,
|
||||
CONF_ALL_UPDATES: False,
|
||||
@@ -251,7 +364,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
public_api_session = async_get_clientsession(self.hass)
|
||||
|
||||
host = user_input[CONF_HOST]
|
||||
port = user_input.get(CONF_PORT, DEFAULT_PORT)
|
||||
port = int(user_input.get(CONF_PORT, DEFAULT_PORT))
|
||||
verify_ssl = user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL)
|
||||
|
||||
protect = ProtectApiClient(
|
||||
@@ -261,7 +374,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
port=port,
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
api_key=user_input[CONF_API_KEY],
|
||||
api_key=user_input.get(CONF_API_KEY, ""),
|
||||
verify_ssl=verify_ssl,
|
||||
cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")),
|
||||
config_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")),
|
||||
@@ -290,14 +403,17 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
auth_user = bootstrap.users.get(bootstrap.auth_user_id)
|
||||
if auth_user and auth_user.cloud_account:
|
||||
errors["base"] = "cloud_user"
|
||||
try:
|
||||
await protect.get_meta_info()
|
||||
except NotAuthorized as ex:
|
||||
_LOGGER.debug(ex)
|
||||
errors[CONF_API_KEY] = "invalid_auth"
|
||||
except ClientError as ex:
|
||||
_LOGGER.error(ex)
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
# Only validate API key if bootstrap succeeded
|
||||
if nvr_data and not errors:
|
||||
try:
|
||||
await protect.get_meta_info()
|
||||
except NotAuthorized as ex:
|
||||
_LOGGER.debug(ex)
|
||||
errors[CONF_API_KEY] = "invalid_auth"
|
||||
except ClientError as ex:
|
||||
_LOGGER.error(ex)
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
return nvr_data, errors
|
||||
|
||||
@@ -313,16 +429,27 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Confirm reauth."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
# prepopulate fields
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
form_data = {**reauth_entry.data}
|
||||
form_data = _build_data_without_credentials(reauth_entry.data)
|
||||
|
||||
if user_input is not None:
|
||||
form_data.update(user_input)
|
||||
# Merge with existing config - empty credentials keep existing values
|
||||
merged_input = {
|
||||
**reauth_entry.data,
|
||||
**_filter_empty_credentials(user_input),
|
||||
}
|
||||
|
||||
# Clear stored session if credentials changed to force fresh authentication
|
||||
await _async_clear_session_if_credentials_changed(
|
||||
self.hass, reauth_entry, merged_input
|
||||
)
|
||||
|
||||
# validate login data
|
||||
_, errors = await self._async_get_nvr_data(form_data)
|
||||
_, errors = await self._async_get_nvr_data(merged_input)
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(reauth_entry, data=form_data)
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry, data=_normalize_port(merged_input)
|
||||
)
|
||||
|
||||
self.context["title_placeholders"] = {
|
||||
"name": reauth_entry.title,
|
||||
@@ -335,14 +462,58 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.hass
|
||||
),
|
||||
},
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_USERNAME, default=form_data.get(CONF_USERNAME)
|
||||
): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
data_schema=self.add_suggested_values_to_schema(REAUTH_SCHEMA, form_data),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
form_data = _build_data_without_credentials(reconfigure_entry.data)
|
||||
|
||||
if user_input is not None:
|
||||
# Merge with existing config - empty credentials keep existing values
|
||||
merged_input = {
|
||||
**reconfigure_entry.data,
|
||||
**_filter_empty_credentials(user_input),
|
||||
}
|
||||
|
||||
# Clear stored session if credentials changed to force fresh authentication
|
||||
await _async_clear_session_if_credentials_changed(
|
||||
self.hass, reconfigure_entry, merged_input
|
||||
)
|
||||
|
||||
# validate login data
|
||||
nvr_data, errors = await self._async_get_nvr_data(merged_input)
|
||||
if nvr_data and not errors:
|
||||
new_unique_id = _async_unifi_mac_from_hass(nvr_data.mac)
|
||||
_LOGGER.debug(
|
||||
"Reconfigure: Current unique_id=%s, NVR MAC=%s, formatted=%s",
|
||||
reconfigure_entry.unique_id,
|
||||
nvr_data.mac,
|
||||
new_unique_id,
|
||||
)
|
||||
await self.async_set_unique_id(new_unique_id)
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_nvr")
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data=_normalize_port(merged_input),
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
description_placeholders={
|
||||
"local_user_documentation_url": await async_local_user_documentation_url(
|
||||
self.hass
|
||||
),
|
||||
},
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
RECONFIGURE_SCHEMA, form_data
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
@@ -362,7 +533,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
return self._async_create_entry(nvr_data.display_name, user_input)
|
||||
|
||||
user_input = user_input or {}
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
description_placeholders={
|
||||
@@ -370,23 +540,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.hass
|
||||
)
|
||||
},
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST)): str,
|
||||
vol.Required(
|
||||
CONF_PORT, default=user_input.get(CONF_PORT, DEFAULT_PORT)
|
||||
): int,
|
||||
vol.Required(
|
||||
CONF_VERIFY_SSL,
|
||||
default=user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL),
|
||||
): bool,
|
||||
vol.Required(
|
||||
CONF_USERNAME, default=user_input.get(CONF_USERNAME)
|
||||
): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
),
|
||||
data_schema=self.add_suggested_values_to_schema(CONFIG_SCHEMA, user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"discovery_started": "Discovery started",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"wrong_nvr": "Connected to a different NVR than expected. If you replaced your hardware, please remove the old integration and add it again."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -17,12 +19,16 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::unifiprotect::config::step::user::data_description::api_key%]",
|
||||
"password": "[%key:component::unifiprotect::config::step::user::data_description::password%]",
|
||||
"username": "[%key:component::unifiprotect::config::step::user::data_description::username%]"
|
||||
"port": "[%key:component::unifiprotect::config::step::user::data_description::port%]",
|
||||
"username": "[%key:component::unifiprotect::config::step::user::data_description::username%]",
|
||||
"verify_ssl": "[%key:component::unifiprotect::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Do you want to set up {name} ({ip_address})? You will need a local user created in your UniFi OS Console to log in with. Ubiquiti Cloud users will not work. For more information: {local_user_documentation_url}",
|
||||
"title": "UniFi Protect discovered"
|
||||
@@ -30,20 +36,36 @@
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"host": "IP/Host of UniFi Protect server",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::unifiprotect::config::step::user::data_description::api_key%]",
|
||||
"host": "[%key:component::unifiprotect::config::step::user::data_description::host%]",
|
||||
"password": "[%key:component::unifiprotect::config::step::user::data_description::password%]",
|
||||
"port": "[%key:component::unifiprotect::config::step::user::data_description::port%]",
|
||||
"api_key": "API key for your local user account. Leave empty to keep your existing API key.",
|
||||
"password": "Password for your local user account. Leave empty to keep your existing password.",
|
||||
"username": "[%key:component::unifiprotect::config::step::user::data_description::username%]"
|
||||
},
|
||||
"description": "Your credentials or API key seem to be missing or invalid. For instructions on how to create a local user or generate a new API key, please refer to the documentation: {local_user_documentation_url}",
|
||||
"title": "UniFi Protect reauth"
|
||||
"description": "Your credentials or API key seem to be missing or invalid. Leave password and API key empty to keep your existing credentials. For more information: {local_user_documentation_url}",
|
||||
"title": "Reauth UniFi Protect"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::unifiprotect::config::step::reauth_confirm::data_description::api_key%]",
|
||||
"host": "[%key:component::unifiprotect::config::step::user::data_description::host%]",
|
||||
"password": "[%key:component::unifiprotect::config::step::reauth_confirm::data_description::password%]",
|
||||
"port": "[%key:component::unifiprotect::config::step::user::data_description::port%]",
|
||||
"username": "[%key:component::unifiprotect::config::step::user::data_description::username%]",
|
||||
"verify_ssl": "[%key:component::unifiprotect::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Update the configuration for your UniFi Protect device. Leave password and API key empty to keep your existing credentials. For more information: {local_user_documentation_url}",
|
||||
"title": "Reconfigure UniFi Protect"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
|
||||
@@ -630,7 +630,7 @@ class XboxSource(MediaSource):
                    title=image.type,
                    can_play=True,
                    can_expand=False,
                    thumbnail=image.url,
                    thumbnail=to_https(image.url),
                )
                for image in game.images
            ]

@@ -13,5 +13,5 @@
  "documentation": "https://www.home-assistant.io/integrations/yale",
  "iot_class": "cloud_push",
  "loggers": ["socketio", "engineio", "yalexs"],
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
}

@@ -52,6 +52,7 @@ class YaleXSBLEBaseLock(YALEXSBLEEntity, LockEntity):
        elif lock_state in (
            LockStatus.UNKNOWN_01,
            LockStatus.UNKNOWN_06,
            LockStatus.JAMMED,
        ):
            self._attr_is_jammed = True
        elif lock_state is LockStatus.UNKNOWN:

@@ -12,5 +12,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
  "iot_class": "local_push",
  "requirements": ["yalexs-ble==3.2.1"]
  "requirements": ["yalexs-ble==3.2.2"]
}

@@ -394,12 +394,12 @@ class EntityTriggerBase(Trigger):
        if not from_state or from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
            return

        # The trigger should never fire if the new state is not the to state
        if not to_state or not self.is_to_state(to_state):
        # The trigger should never fire if the previous state was not the from state
        if not to_state or not self.is_from_state(from_state, to_state):
            return

        # The trigger should never fire if the previous state was not the from state
        if not self.is_from_state(from_state, to_state):
        # The trigger should never fire if the new state is not the to state
        if not self.is_to_state(to_state):
            return

        if behavior == BEHAVIOR_LAST:
@@ -453,22 +453,6 @@ class ConditionalEntityStateTriggerBase(EntityTriggerBase):
        return state.state in self._to_states


class FromEntityStateTriggerBase(EntityTriggerBase):
    """Class for entity state changes from a specific state."""

    _from_state: str

    def is_from_state(self, from_state: State, to_state: State) -> bool:
        """Check if the state matches the origin state."""
        return (
            from_state.state == self._from_state and to_state.state != self._from_state
        )

    def is_to_state(self, state: State) -> bool:
        """Check if the state matches the target state."""
        return state.state != self._from_state


class EntityStateAttributeTriggerBase(EntityTriggerBase):
    """Trigger for entity state attribute changes."""

@@ -509,20 +493,6 @@ def make_conditional_entity_state_trigger(
    return CustomTrigger


def make_from_entity_state_trigger(
    domain: str, *, from_state: str
) -> type[FromEntityStateTriggerBase]:
    """Create a "from" entity state trigger class."""

    class CustomTrigger(FromEntityStateTriggerBase):
        """Trigger for "from" entity state changes."""

        _domain = domain
        _from_state = from_state

    return CustomTrigger


def make_entity_state_attribute_trigger(
    domain: str, attribute: str, to_state: str
) -> type[EntityStateAttributeTriggerBase]:

16
requirements_all.txt
generated
@@ -187,7 +187,7 @@ aioairq==0.4.7
|
||||
aioairzone-cloud==0.7.2
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==1.0.2
|
||||
aioairzone==1.0.3
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==10.0.0
|
||||
@@ -252,7 +252,7 @@ aioelectricitymaps==1.1.1
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==42.10.0
|
||||
aioesphomeapi==43.0.0
|
||||
|
||||
# homeassistant.components.matrix
|
||||
# homeassistant.components.slack
|
||||
@@ -393,7 +393,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==13.22.0
|
||||
aioshelly==13.23.0
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@@ -893,7 +893,7 @@ energyflip-client==0.2.2
|
||||
energyid-webhooks==0.0.14
|
||||
|
||||
# homeassistant.components.energyzero
|
||||
energyzero==2.1.1
|
||||
energyzero==4.0.1
|
||||
|
||||
# homeassistant.components.enocean
|
||||
enocean==0.50
|
||||
@@ -2154,7 +2154,7 @@ pykwb==0.0.8
|
||||
pylacrosse==0.4
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.2.2
|
||||
pylamarzocco==2.2.3
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@@ -2196,7 +2196,7 @@ pymeteoclimatic==0.1.0
|
||||
pymicro-vad==1.0.1
|
||||
|
||||
# homeassistant.components.miele
|
||||
pymiele==0.6.0
|
||||
pymiele==0.6.1
|
||||
|
||||
# homeassistant.components.xiaomi_tv
|
||||
pymitv==1.4.3
|
||||
@@ -2566,7 +2566,7 @@ python-rabbitair==0.0.8
|
||||
python-ripple-api==0.0.3
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==3.10.2
|
||||
python-roborock==3.10.10
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.45
|
||||
@@ -3215,7 +3215,7 @@ yalesmartalarmclient==0.4.3
|
||||
# homeassistant.components.august
|
||||
# homeassistant.components.yale
|
||||
# homeassistant.components.yalexs_ble
|
||||
yalexs-ble==3.2.1
|
||||
yalexs-ble==3.2.2
|
||||
|
||||
# homeassistant.components.august
|
||||
# homeassistant.components.yale
|
||||
|
||||
16
requirements_test_all.txt
generated
@@ -178,7 +178,7 @@ aioairq==0.4.7
|
||||
aioairzone-cloud==0.7.2
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==1.0.2
|
||||
aioairzone==1.0.3
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==10.0.0
|
||||
@@ -243,7 +243,7 @@ aioelectricitymaps==1.1.1
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==42.10.0
|
||||
aioesphomeapi==43.0.0
|
||||
|
||||
# homeassistant.components.matrix
|
||||
# homeassistant.components.slack
|
||||
@@ -378,7 +378,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==13.22.0
|
||||
aioshelly==13.23.0
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@@ -787,7 +787,7 @@ energyflip-client==0.2.2
|
||||
energyid-webhooks==0.0.14
|
||||
|
||||
# homeassistant.components.energyzero
|
||||
energyzero==2.1.1
|
||||
energyzero==4.0.1
|
||||
|
||||
# homeassistant.components.enocean
|
||||
enocean==0.50
|
||||
@@ -1813,7 +1813,7 @@ pykrakenapi==0.1.8
|
||||
pykulersky==0.5.8
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.2.2
|
||||
pylamarzocco==2.2.3
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@@ -1852,7 +1852,7 @@ pymeteoclimatic==0.1.0
|
||||
pymicro-vad==1.0.1
|
||||
|
||||
# homeassistant.components.miele
|
||||
pymiele==0.6.0
|
||||
pymiele==0.6.1
|
||||
|
||||
# homeassistant.components.mochad
|
||||
pymochad==0.2.0
|
||||
@@ -2147,7 +2147,7 @@ python-pooldose==0.8.1
|
||||
python-rabbitair==0.0.8
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==3.10.2
|
||||
python-roborock==3.10.10
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.45
|
||||
@@ -2679,7 +2679,7 @@ yalesmartalarmclient==0.4.3
|
||||
# homeassistant.components.august
|
||||
# homeassistant.components.yale
|
||||
# homeassistant.components.yalexs_ble
|
||||
yalexs-ble==3.2.1
|
||||
yalexs-ble==3.2.2
|
||||
|
||||
# homeassistant.components.august
|
||||
# homeassistant.components.yale
|
||||
|
||||
@@ -12,7 +12,13 @@ from pyairobotrest.models import (
import pytest

from homeassistant.components.airobot.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import (
    CONF_HOST,
    CONF_MAC,
    CONF_PASSWORD,
    CONF_USERNAME,
    Platform,
)
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry
@@ -105,16 +111,24 @@ def mock_config_entry() -> MockConfigEntry:
    )


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return [Platform.CLIMATE, Platform.SENSOR]


@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_airobot_client: AsyncMock,
    platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the Airobot integration for testing."""
    mock_config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()
    with patch("homeassistant.components.airobot.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    return mock_config_entry

220
tests/components/airobot/snapshots/test_sensor.ambr
Normal file
@@ -0,0 +1,220 @@
|
||||
# serializer version: 1
|
||||
# name: test_sensors[sensor.test_thermostat_air_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.test_thermostat_air_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Air temperature',
|
||||
'platform': 'airobot',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'air_temperature',
|
||||
'unique_id': 'T01A1B2C3_air_temperature',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_air_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Test Thermostat Air temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_thermostat_air_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '22.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_error_count-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.test_thermostat_error_count',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Error count',
|
||||
'platform': 'airobot',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'errors',
|
||||
'unique_id': 'T01A1B2C3_errors',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_error_count-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Test Thermostat Error count',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_thermostat_error_count',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_heating_uptime-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.test_thermostat_heating_uptime',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 2,
|
||||
}),
|
||||
'sensor.private': dict({
|
||||
'suggested_unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Heating uptime',
|
||||
'platform': 'airobot',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'heating_uptime',
|
||||
'unique_id': 'T01A1B2C3_heating_uptime',
|
||||
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_heating_uptime-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Test Thermostat Heating uptime',
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_thermostat_heating_uptime',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '1.38888888888889',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_humidity-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.test_thermostat_humidity',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.HUMIDITY: 'humidity'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Humidity',
|
||||
'platform': 'airobot',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': 'T01A1B2C3_humidity',
|
||||
'unit_of_measurement': '%',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.test_thermostat_humidity-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'humidity',
|
||||
'friendly_name': 'Test Thermostat Humidity',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': '%',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_thermostat_humidity',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '45.0',
|
||||
})
|
||||
# ---
|
||||
@@ -17,7 +17,7 @@ from homeassistant.components.climate import (
    SERVICE_SET_PRESET_MODE,
    SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
import homeassistant.helpers.entity_registry as er
@@ -25,12 +25,19 @@ import homeassistant.helpers.entity_registry as er
from tests.common import MockConfigEntry, snapshot_platform


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return [Platform.CLIMATE]


@pytest.mark.usefixtures("init_integration")
async def test_climate_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    mock_config_entry: MockConfigEntry,
    platforms: list[Platform],
) -> None:
    """Test climate entities."""
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

38
tests/components/airobot/test_sensor.py
Normal file
@@ -0,0 +1,38 @@
"""Tests for the Airobot sensor platform."""

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry, snapshot_platform


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return [Platform.SENSOR]


@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensors(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test the sensor entities."""
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)


@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensor_availability_without_optional_sensors(
    hass: HomeAssistant,
) -> None:
    """Test sensors are not created when optional hardware is not present."""
    # Default mock has no floor sensor, CO2, or AQI - they should not be created
    assert hass.states.get("sensor.test_thermostat_floor_temperature") is None
    assert hass.states.get("sensor.test_thermostat_carbon_dioxide") is None
    assert hass.states.get("sensor.test_thermostat_air_quality_index") is None

@@ -1,231 +0,0 @@
|
||||
"""Test device_tracker trigger."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_LABEL_ID,
|
||||
CONF_ENTITY_ID,
|
||||
STATE_HOME,
|
||||
STATE_NOT_HOME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
|
||||
from tests.components import (
|
||||
StateDescription,
|
||||
arm_trigger,
|
||||
parametrize_target_entities,
|
||||
parametrize_trigger_states,
|
||||
set_or_remove_state,
|
||||
target_entities,
|
||||
)
|
||||
|
||||
STATE_WORK_ZONE = "work"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, name="stub_blueprint_populate")
|
||||
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
|
||||
"""Stub copying the blueprints to the config folder."""
|
||||
|
||||
|
||||
@pytest.fixture(name="enable_experimental_triggers_conditions")
|
||||
def enable_experimental_triggers_conditions() -> Generator[None]:
|
||||
"""Enable experimental triggers and conditions."""
|
||||
with patch(
|
||||
"homeassistant.components.labs.async_is_preview_feature_enabled",
|
||||
return_value=True,
|
||||
):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def target_device_trackers(hass: HomeAssistant) -> list[str]:
|
||||
"""Create multiple device_trackers entities associated with different targets."""
|
||||
return (await target_entities(hass, "device_tracker"))["included"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"trigger_key",
|
||||
["device_tracker.entered_home", "device_tracker.left_home"],
|
||||
)
|
||||
async def test_device_tracker_triggers_gated_by_labs_flag(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, trigger_key: str
|
||||
) -> None:
|
||||
"""Test the device_tracker triggers are gated by the labs flag."""
|
||||
await arm_trigger(hass, trigger_key, None, {ATTR_LABEL_ID: "test_label"})
|
||||
assert (
|
||||
"Unnamed automation failed to setup triggers and has been disabled: Trigger "
|
||||
f"'{trigger_key}' requires the experimental 'New triggers and conditions' "
|
||||
"feature to be enabled in Home Assistant Labs settings (feature flag: "
|
||||
"'new_triggers_conditions')"
|
||||
) in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
target_states=[STATE_HOME],
|
||||
other_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.left_home",
|
||||
target_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
other_states=[STATE_HOME],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_device_tracker_home_trigger_behavior_any(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_device_trackers: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when any device_tracker changes to a specific state."""
|
||||
other_entity_ids = set(target_device_trackers) - {entity_id}
|
||||
|
||||
# Set all device_trackers, including the tested device_tracker, to the initial state
|
||||
for eid in target_device_trackers:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
|
||||
# Check that changing other device_trackers also triggers
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == (entities_in_target - 1) * state["count"]
|
||||
service_calls.clear()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
target_states=[STATE_HOME],
|
||||
other_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.left_home",
|
||||
target_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
other_states=[STATE_HOME],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_device_tracker_state_trigger_behavior_first(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_device_trackers: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when the first device_tracker changes to a specific state."""
|
||||
other_entity_ids = set(target_device_trackers) - {entity_id}
|
||||
|
||||
# Set all device_trackers, including the tested device_tracker, to the initial state
|
||||
for eid in target_device_trackers:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {"behavior": "first"}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
|
||||
# Triggering other device_trackers should not cause the trigger to fire again
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
|
||||
@pytest.mark.parametrize(
|
||||
("trigger_target_config", "entity_id", "entities_in_target"),
|
||||
parametrize_target_entities("device_tracker"),
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "states"),
|
||||
[
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.entered_home",
|
||||
target_states=[STATE_HOME],
|
||||
other_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger="device_tracker.left_home",
|
||||
target_states=[STATE_NOT_HOME, STATE_WORK_ZONE],
|
||||
other_states=[STATE_HOME],
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_device_tracker_state_trigger_behavior_last(
|
||||
hass: HomeAssistant,
|
||||
service_calls: list[ServiceCall],
|
||||
target_device_trackers: list[str],
|
||||
trigger_target_config: dict,
|
||||
entity_id: str,
|
||||
entities_in_target: int,
|
||||
trigger: str,
|
||||
states: list[StateDescription],
|
||||
) -> None:
|
||||
"""Test that the device_tracker home triggers when the last device_tracker changes to a specific state."""
|
||||
other_entity_ids = set(target_device_trackers) - {entity_id}
|
||||
|
||||
# Set all device_trackers, including the tested device_tracker, to the initial state
|
||||
for eid in target_device_trackers:
|
||||
set_or_remove_state(hass, eid, states[0]["included"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await arm_trigger(hass, trigger, {"behavior": "last"}, trigger_target_config)
|
||||
|
||||
for state in states[1:]:
|
||||
included_state = state["included"]
|
||||
for other_entity_id in other_entity_ids:
|
||||
set_or_remove_state(hass, other_entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == 0
|
||||
|
||||
set_or_remove_state(hass, entity_id, included_state)
|
||||
await hass.async_block_till_done()
|
||||
assert len(service_calls) == state["count"]
|
||||
for service_call in service_calls:
|
||||
assert service_call.data[CONF_ENTITY_ID] == entity_id
|
||||
service_calls.clear()
|
||||
@@ -40,10 +40,10 @@ async def mock_energyzero(hass: HomeAssistant) -> AsyncGenerator[MagicMock]:
        "homeassistant.components.energyzero.coordinator.EnergyZero", autospec=True
    ) as energyzero_mock:
        client = energyzero_mock.return_value
        client.energy_prices.return_value = Electricity.from_dict(
        client.get_electricity_prices_legacy.return_value = Electricity.from_dict(
            await async_load_json_object_fixture(hass, "today_energy.json", DOMAIN)
        )
        client.gas_prices.return_value = Gas.from_dict(
        client.get_gas_prices_legacy.return_value = Gas.from_dict(
            await async_load_json_object_fixture(hass, "today_gas.json", DOMAIN)
        )
        yield client

@@ -45,7 +45,7 @@ async def test_diagnostics_no_gas_today(
    snapshot: SnapshotAssertion,
) -> None:
    """Test diagnostics, no gas sensors available."""
    mock_energyzero.gas_prices.side_effect = EnergyZeroNoDataError
    mock_energyzero.get_gas_prices_legacy.side_effect = EnergyZeroNoDataError

    freezer.tick(SCAN_INTERVAL)
    async_fire_time_changed(hass)

@@ -40,7 +40,7 @@ async def test_no_gas_today(
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test the EnergyZero - No gas sensors available."""
    mock_energyzero.gas_prices.side_effect = EnergyZeroNoDataError
    mock_energyzero.get_gas_prices_legacy.side_effect = EnergyZeroNoDataError

    freezer.tick(SCAN_INTERVAL)
    async_fire_time_changed(hass)

@@ -12,12 +12,14 @@ from aioesphomeapi import (
    AreaInfo,
    DeviceInfo,
    EncryptionPlaintextAPIError,
    ExecuteServiceResponse,
    HomeassistantServiceCall,
    InvalidAuthAPIError,
    InvalidEncryptionKeyAPIError,
    LogLevel,
    RequiresEncryptionAPIError,
    SubDeviceInfo,
    SupportsResponseType,
    UserService,
    UserServiceArg,
    UserServiceArgType,
@@ -49,7 +51,7 @@ from homeassistant.const import (
    CONF_PORT,
    EVENT_HOMEASSISTANT_CLOSE,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
@@ -1456,7 +1458,7 @@ async def test_esphome_user_service_fails(
    await hass.async_block_till_done()
    assert hass.services.has_service(DOMAIN, "with_dash_simple_service")

    mock_client.execute_service = Mock(side_effect=APIConnectionError("fail"))
    mock_client.execute_service = AsyncMock(side_effect=APIConnectionError("fail"))
    with pytest.raises(HomeAssistantError) as exc:
        await hass.services.async_call(
            DOMAIN, "with_dash_simple_service", {"arg1": True}, blocking=True
@@ -2812,3 +2814,462 @@ async def test_no_zwave_proxy_subscribe_without_feature_flags(

    # Verify subscribe_zwave_proxy_request was NOT called
    mock_client.subscribe_zwave_proxy_request.assert_not_called()


async def test_execute_service_response_type_none(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with SupportsResponseType.NONE (fire and forget)."""
    service = UserService(
        name="fire_forget_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.NONE,
    )

    # For NONE type, no response is expected
    mock_client.execute_service = AsyncMock(return_value=None)

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    assert hass.services.has_service(DOMAIN, "test_fire_forget_service")

    # Call the service - should be fire and forget
    await hass.services.async_call(
        DOMAIN, "test_fire_forget_service", {"arg1": True}, blocking=True
    )
    await hass.async_block_till_done()

    # Verify execute_service was called without extra kwargs (fire and forget)
    mock_client.execute_service.assert_called_once()
    call_args = mock_client.execute_service.call_args
    assert call_args[0][1] == {"arg1": True}
    # Fire and forget - no return_response or other kwargs
    assert call_args[1] == {}


async def test_execute_service_response_type_status(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with SupportsResponseType.STATUS."""
    service = UserService(
        name="status_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.STATUS,
    )

    # Set up mock response
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=True,
            error_message="",
            response_data=b"",
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    # Call the service - should wait for response but not return data
    # Note: STATUS maps to SupportsResponse.NONE so we can't use return_response=True
    await hass.services.async_call(
        DOMAIN, "test_status_service", {"arg1": True}, blocking=True
    )
    await hass.async_block_till_done()

    # Verify return_response was False (STATUS doesn't need response_data)
    call_args = mock_client.execute_service.call_args
    assert call_args[1].get("return_response") is False


async def test_execute_service_response_type_optional_without_return(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with SupportsResponseType.OPTIONAL when caller doesn't request response."""
    service = UserService(
        name="optional_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.OPTIONAL,
    )

    # Set up mock response
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=True,
            error_message="",
            response_data=b'{"result": "data"}',
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    # Call without return_response - should still wait but not return data
    result = await hass.services.async_call(
        DOMAIN, "test_optional_service", {"arg1": True}, blocking=True
    )
    await hass.async_block_till_done()

    assert result is None

    # Verify return_response was False (caller didn't request it)
    call_args = mock_client.execute_service.call_args
    assert call_args[1].get("return_response") is False


async def test_execute_service_response_type_optional_with_return(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with SupportsResponseType.OPTIONAL when caller requests response."""
    service = UserService(
        name="optional_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.OPTIONAL,
    )

    # Set up mock response with data
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=True,
            error_message="",
            response_data=b'{"result": "data"}',
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    # Call with return_response=True
    result = await hass.services.async_call(
        DOMAIN,
        "test_optional_service",
        {"arg1": True},
        blocking=True,
        return_response=True,
    )
    await hass.async_block_till_done()

    # Should return parsed JSON data
    assert result == {"result": "data"}

    # Verify return_response was True
    call_args = mock_client.execute_service.call_args
    assert call_args[1].get("return_response") is True


async def test_execute_service_response_type_only(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with SupportsResponseType.ONLY."""
    service = UserService(
        name="only_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.ONLY,
    )

    # Set up mock response
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=True,
            error_message="",
            response_data=b'{"status": "ok", "value": 42}',
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    # Call the service - ONLY type always returns data
    result = await hass.services.async_call(
        DOMAIN, "test_only_service", {"arg1": True}, blocking=True, return_response=True
    )
    await hass.async_block_till_done()

    assert result == {"status": "ok", "value": 42}

    # Verify return_response was True
    call_args = mock_client.execute_service.call_args
    assert call_args[1].get("return_response") is True


async def test_execute_service_timeout(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service timeout handling."""
    service = UserService(
        name="slow_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.STATUS,
    )

    # Mock execute_service to raise TimeoutError
    mock_client.execute_service = AsyncMock(side_effect=TimeoutError())

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    with pytest.raises(HomeAssistantError) as exc_info:
        await hass.services.async_call(
            DOMAIN, "test_slow_service", {"arg1": True}, blocking=True
        )

    assert "Timeout" in str(exc_info.value)


async def test_execute_service_connection_error(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service connection error handling."""
    service = UserService(
        name="error_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.NONE,
    )

    mock_client.execute_service = AsyncMock(
        side_effect=APIConnectionError("Connection lost")
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    with pytest.raises(HomeAssistantError) as exc_info:
        await hass.services.async_call(
            DOMAIN, "test_error_service", {"arg1": True}, blocking=True
        )

    assert "Connection lost" in str(exc_info.value)


async def test_execute_service_connection_error_with_response(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service connection error when waiting for response."""
    service = UserService(
        name="error_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.STATUS,  # Uses response path
    )

    mock_client.execute_service = AsyncMock(
        side_effect=APIConnectionError("Connection lost")
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    with pytest.raises(HomeAssistantError) as exc_info:
        await hass.services.async_call(
            DOMAIN, "test_error_service", {"arg1": True}, blocking=True
        )

    assert "Connection lost" in str(exc_info.value)


async def test_execute_service_failure_response(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with failure response from device."""
    service = UserService(
        name="failing_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.STATUS,
    )

    # Set up mock failure response
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=False,
            error_message="Device reported error: invalid argument",
            response_data=b"",
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    with pytest.raises(HomeAssistantError) as exc_info:
        await hass.services.async_call(
            DOMAIN, "test_failing_service", {"arg1": True}, blocking=True
        )

    assert "invalid argument" in str(exc_info.value)


async def test_execute_service_invalid_json_response(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test execute_service with invalid JSON in response data."""
    service = UserService(
        name="bad_json_service",
        key=1,
        args=[UserServiceArg(name="arg1", type=UserServiceArgType.BOOL)],
        supports_response=SupportsResponseType.ONLY,
    )

    # Set up mock response with invalid JSON
    mock_client.execute_service = AsyncMock(
        return_value=ExecuteServiceResponse(
            call_id=1,
            success=True,
            error_message="",
            response_data=b"not valid json {{{",
        )
    )

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=[service],
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    with pytest.raises(HomeAssistantError) as exc_info:
        await hass.services.async_call(
            DOMAIN,
            "test_bad_json_service",
            {"arg1": True},
            blocking=True,
            return_response=True,
        )

    assert "Invalid JSON response" in str(exc_info.value)


async def test_service_registration_response_types(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test that services are registered with correct SupportsResponse types."""
    services = [
        UserService(
            name="none_service",
            key=1,
            args=[],
            supports_response=SupportsResponseType.NONE,
        ),
        UserService(
            name="optional_service",
            key=2,
            args=[],
            supports_response=SupportsResponseType.OPTIONAL,
        ),
        UserService(
            name="only_service",
            key=3,
            args=[],
            supports_response=SupportsResponseType.ONLY,
        ),
        UserService(
            name="status_service",
            key=4,
            args=[],
            supports_response=SupportsResponseType.STATUS,
        ),
    ]

    await mock_esphome_device(
        mock_client=mock_client,
        user_service=services,
        device_info={"name": "test"},
    )
    await hass.async_block_till_done()

    # Verify all services are registered
    assert hass.services.has_service(DOMAIN, "test_none_service")
    assert hass.services.has_service(DOMAIN, "test_optional_service")
    assert hass.services.has_service(DOMAIN, "test_only_service")
    assert hass.services.has_service(DOMAIN, "test_status_service")

    # Verify response types are correctly mapped using public API
    # NONE -> SupportsResponse.NONE
    # OPTIONAL -> SupportsResponse.OPTIONAL
    # ONLY -> SupportsResponse.ONLY
    # STATUS -> SupportsResponse.NONE (no data returned to HA)
    assert (
        hass.services.supports_response(DOMAIN, "test_none_service")
        == SupportsResponse.NONE
    )
    assert (
        hass.services.supports_response(DOMAIN, "test_optional_service")
        == SupportsResponse.OPTIONAL
    )
    assert (
        hass.services.supports_response(DOMAIN, "test_only_service")
        == SupportsResponse.ONLY
    )
    assert (
        hass.services.supports_response(DOMAIN, "test_status_service")
        == SupportsResponse.NONE
    )

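For orientation, the mapping these new tests pin down is: SupportsResponseType.NONE, OPTIONAL and ONLY translate directly to the matching SupportsResponse values, while STATUS waits for the device's result but registers as SupportsResponse.NONE because no data is returned to Home Assistant. A minimal sketch of that mapping, assuming a hypothetical helper name (the real integration code may structure this differently):

from aioesphomeapi import SupportsResponseType

from homeassistant.core import SupportsResponse

# Illustrative mapping implied by the assertions above; not the integration's actual code.
_RESPONSE_TYPE_MAP = {
    SupportsResponseType.NONE: SupportsResponse.NONE,
    SupportsResponseType.OPTIONAL: SupportsResponse.OPTIONAL,
    SupportsResponseType.ONLY: SupportsResponse.ONLY,
    # STATUS waits for success/error from the device but returns no data to HA.
    SupportsResponseType.STATUS: SupportsResponse.NONE,
}


def to_ha_supports_response(esphome_type: SupportsResponseType) -> SupportsResponse:
    """Translate an ESPHome response type to the Home Assistant enum (sketch only)."""
    return _RESPONSE_TYPE_MAP[esphome_type]
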
@@ -59,6 +59,7 @@ MOCK_TRACKER = Tracker(
        not_charging=True,
        overall=True,
    ),
    icon="http://res.cloudinary.com/iot-venture/image/upload/v1717594357/kyaqq7nfitrdvaoakb8s.jpg",
)

@@ -37,6 +37,7 @@
# name: test_state_entity_device_snapshots[device_tracker.fluffy-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'entity_picture': 'http://res.cloudinary.com/iot-venture/image/upload/v1717594357/kyaqq7nfitrdvaoakb8s.jpg',
      'friendly_name': 'Fluffy',
      'gps_accuracy': 10.0,
      'latitude': 52.520008,

@@ -4141,27 +4141,34 @@
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'automatic_plus',
|
||||
'bed_linen',
|
||||
'clean_machine',
|
||||
'cool_air',
|
||||
'cottonrepair',
|
||||
'cottons',
|
||||
'cottons_eco',
|
||||
'cottons_hygiene',
|
||||
'curtains',
|
||||
'dark_garments',
|
||||
'dark_jeans',
|
||||
'delicates',
|
||||
'denim',
|
||||
'down_duvets',
|
||||
'down_filled_items',
|
||||
'drain_spin',
|
||||
'easy_care',
|
||||
'eco_40_60',
|
||||
'express_20',
|
||||
'first_wash',
|
||||
'freshen_up',
|
||||
'game_pieces',
|
||||
'minimum_iron',
|
||||
'no_program',
|
||||
'normal',
|
||||
'outdoor_garments',
|
||||
'outerwear',
|
||||
'pillows',
|
||||
'powerfresh',
|
||||
'pre_ironing',
|
||||
'proofing',
|
||||
'quick_power_wash',
|
||||
'rinse',
|
||||
@@ -4169,10 +4176,13 @@
|
||||
'separate_rinse_starch',
|
||||
'shirts',
|
||||
'silks',
|
||||
'smartmatic',
|
||||
'sportswear',
|
||||
'starch',
|
||||
'steam_care',
|
||||
'stuffed_toys',
|
||||
'trainers',
|
||||
'trainers_refresh',
|
||||
'warm_air',
|
||||
'woollens',
|
||||
]),
|
||||
@@ -4213,27 +4223,34 @@
|
||||
'friendly_name': 'Washing machine Program',
|
||||
'options': list([
|
||||
'automatic_plus',
|
||||
'bed_linen',
|
||||
'clean_machine',
|
||||
'cool_air',
|
||||
'cottonrepair',
|
||||
'cottons',
|
||||
'cottons_eco',
|
||||
'cottons_hygiene',
|
||||
'curtains',
|
||||
'dark_garments',
|
||||
'dark_jeans',
|
||||
'delicates',
|
||||
'denim',
|
||||
'down_duvets',
|
||||
'down_filled_items',
|
||||
'drain_spin',
|
||||
'easy_care',
|
||||
'eco_40_60',
|
||||
'express_20',
|
||||
'first_wash',
|
||||
'freshen_up',
|
||||
'game_pieces',
|
||||
'minimum_iron',
|
||||
'no_program',
|
||||
'normal',
|
||||
'outdoor_garments',
|
||||
'outerwear',
|
||||
'pillows',
|
||||
'powerfresh',
|
||||
'pre_ironing',
|
||||
'proofing',
|
||||
'quick_power_wash',
|
||||
'rinse',
|
||||
@@ -4241,10 +4258,13 @@
|
||||
'separate_rinse_starch',
|
||||
'shirts',
|
||||
'silks',
|
||||
'smartmatic',
|
||||
'sportswear',
|
||||
'starch',
|
||||
'steam_care',
|
||||
'stuffed_toys',
|
||||
'trainers',
|
||||
'trainers_refresh',
|
||||
'warm_air',
|
||||
'woollens',
|
||||
]),
|
||||
@@ -4265,6 +4285,7 @@
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'anti_crease',
|
||||
'automatic_start',
|
||||
'cleaning',
|
||||
'cooling_down',
|
||||
'disinfecting',
|
||||
@@ -4322,6 +4343,7 @@
|
||||
'friendly_name': 'Washing machine Program phase',
|
||||
'options': list([
|
||||
'anti_crease',
|
||||
'automatic_start',
|
||||
'cleaning',
|
||||
'cooling_down',
|
||||
'disinfecting',
|
||||
@@ -6489,27 +6511,34 @@
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'automatic_plus',
|
||||
'bed_linen',
|
||||
'clean_machine',
|
||||
'cool_air',
|
||||
'cottonrepair',
|
||||
'cottons',
|
||||
'cottons_eco',
|
||||
'cottons_hygiene',
|
||||
'curtains',
|
||||
'dark_garments',
|
||||
'dark_jeans',
|
||||
'delicates',
|
||||
'denim',
|
||||
'down_duvets',
|
||||
'down_filled_items',
|
||||
'drain_spin',
|
||||
'easy_care',
|
||||
'eco_40_60',
|
||||
'express_20',
|
||||
'first_wash',
|
||||
'freshen_up',
|
||||
'game_pieces',
|
||||
'minimum_iron',
|
||||
'no_program',
|
||||
'normal',
|
||||
'outdoor_garments',
|
||||
'outerwear',
|
||||
'pillows',
|
||||
'powerfresh',
|
||||
'pre_ironing',
|
||||
'proofing',
|
||||
'quick_power_wash',
|
||||
'rinse',
|
||||
@@ -6517,10 +6546,13 @@
|
||||
'separate_rinse_starch',
|
||||
'shirts',
|
||||
'silks',
|
||||
'smartmatic',
|
||||
'sportswear',
|
||||
'starch',
|
||||
'steam_care',
|
||||
'stuffed_toys',
|
||||
'trainers',
|
||||
'trainers_refresh',
|
||||
'warm_air',
|
||||
'woollens',
|
||||
]),
|
||||
@@ -6561,27 +6593,34 @@
|
||||
'friendly_name': 'Washing machine Program',
|
||||
'options': list([
|
||||
'automatic_plus',
|
||||
'bed_linen',
|
||||
'clean_machine',
|
||||
'cool_air',
|
||||
'cottonrepair',
|
||||
'cottons',
|
||||
'cottons_eco',
|
||||
'cottons_hygiene',
|
||||
'curtains',
|
||||
'dark_garments',
|
||||
'dark_jeans',
|
||||
'delicates',
|
||||
'denim',
|
||||
'down_duvets',
|
||||
'down_filled_items',
|
||||
'drain_spin',
|
||||
'easy_care',
|
||||
'eco_40_60',
|
||||
'express_20',
|
||||
'first_wash',
|
||||
'freshen_up',
|
||||
'game_pieces',
|
||||
'minimum_iron',
|
||||
'no_program',
|
||||
'normal',
|
||||
'outdoor_garments',
|
||||
'outerwear',
|
||||
'pillows',
|
||||
'powerfresh',
|
||||
'pre_ironing',
|
||||
'proofing',
|
||||
'quick_power_wash',
|
||||
'rinse',
|
||||
@@ -6589,10 +6628,13 @@
|
||||
'separate_rinse_starch',
|
||||
'shirts',
|
||||
'silks',
|
||||
'smartmatic',
|
||||
'sportswear',
|
||||
'starch',
|
||||
'steam_care',
|
||||
'stuffed_toys',
|
||||
'trainers',
|
||||
'trainers_refresh',
|
||||
'warm_air',
|
||||
'woollens',
|
||||
]),
|
||||
@@ -6613,6 +6655,7 @@
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'anti_crease',
|
||||
'automatic_start',
|
||||
'cleaning',
|
||||
'cooling_down',
|
||||
'disinfecting',
|
||||
@@ -6670,6 +6713,7 @@
|
||||
'friendly_name': 'Washing machine Program phase',
|
||||
'options': list([
|
||||
'anti_crease',
|
||||
'automatic_start',
|
||||
'cleaning',
|
||||
'cooling_down',
|
||||
'disinfecting',
|
||||
|
||||
@@ -54,6 +54,57 @@
    'state': '3.5',
  })
# ---
# name: test_setup_and_update[packet_loss]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'sensor.10_10_10_10_packet_loss',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Packet loss',
    'platform': 'ping',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'loss',
    'unit_of_measurement': '%',
  })
# ---
# name: test_setup_and_update[packet_loss].1
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'friendly_name': '10.10.10.10 Packet loss',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': '%',
    }),
    'context': <ANY>,
    'entity_id': 'sensor.10_10_10_10_packet_loss',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '0.0',
  })
# ---
# name: test_setup_and_update[round_trip_time_average]
  EntityRegistryEntrySnapshot({
    'aliases': set({

@@ -17,6 +17,7 @@ from homeassistant.helpers import entity_registry as er
        "round_trip_time_mean_deviation",  # should be None in the snapshot
        "round_trip_time_minimum",
        "jitter",
        "packet_loss",
    ],
)
async def test_setup_and_update(

@@ -25,6 +25,7 @@ from roborock.data import (
    ZeoState,
)
from roborock.devices.device import RoborockDevice
from roborock.devices.device_manager import DeviceManager
from roborock.devices.traits.v1 import PropertiesApi
from roborock.devices.traits.v1.clean_summary import CleanSummaryTrait
from roborock.devices.traits.v1.command import CommandTrait
@@ -134,18 +135,6 @@ class FakeDevice(RoborockDevice):
        """Close the device."""


class FakeDeviceManager:
    """A fake device manager that returns a list of devices."""

    def __init__(self, devices: list[RoborockDevice]) -> None:
        """Initialize the fake device manager."""
        self._devices = devices

    async def get_devices(self) -> list[RoborockDevice]:
        """Return the list of devices."""
        return self._devices


def make_mock_trait(
    trait_spec: type[V1TraitMixin] | None = None,
    dataclass_template: RoborockBase | None = None,
@@ -348,16 +337,26 @@ def fake_vacuum_command_fixture(
    return command_trait


@pytest.fixture(name="device_manager")
def device_manager_fixture(
    fake_devices: list[FakeDevice],
) -> AsyncMock:
    """Fixture to create a fake device manager."""
    device_manager = AsyncMock(spec=DeviceManager)
    device_manager.get_devices = AsyncMock(return_value=fake_devices)
    return device_manager


@pytest.fixture(name="fake_create_device_manager", autouse=True)
def fake_create_device_manager_fixture(
    fake_devices: list[FakeDevice],
) -> Generator[Mock]:
    device_manager: AsyncMock,
) -> None:
    """Fixture to create a fake device manager."""
    with patch(
        "homeassistant.components.roborock.create_device_manager",
    ) as mock_create_device_manager:
        mock_create_device_manager.return_value = FakeDeviceManager(fake_devices)
        yield mock_create_device_manager
        mock_create_device_manager.return_value = device_manager
        yield


@pytest.fixture(name="config_entry_data")

@@ -2,7 +2,7 @@

import pathlib
from typing import Any
from unittest.mock import patch
from unittest.mock import AsyncMock, patch

import pytest
from roborock import (
@@ -26,14 +26,42 @@ from tests.common import MockConfigEntry
from tests.typing import ClientSessionGenerator


async def test_unload_entry(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None:
    """Test unloading roboorck integration."""
async def test_unload_entry(
    hass: HomeAssistant,
    setup_entry: MockConfigEntry,
    device_manager: AsyncMock,
) -> None:
    """Test unloading roborock integration."""
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert setup_entry.state is ConfigEntryState.LOADED

    assert device_manager.get_devices.called
    assert not device_manager.close.called

    # Unload the config entry and verify that the device manager is closed
    assert await hass.config_entries.async_unload(setup_entry.entry_id)
    await hass.async_block_till_done()
    assert setup_entry.state is ConfigEntryState.NOT_LOADED

    assert device_manager.close.called


async def test_home_assistant_stop(
    hass: HomeAssistant,
    setup_entry: MockConfigEntry,
    device_manager: AsyncMock,
) -> None:
    """Test shutting down Home Assistant."""
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert setup_entry.state is ConfigEntryState.LOADED

    assert not device_manager.close.called

    # Perform Home Assistant stop and verify that device manager is closed
    await hass.async_stop()

    assert device_manager.close.called


async def test_reauth_started(
    hass: HomeAssistant, mock_roborock_entry: MockConfigEntry

@@ -603,6 +603,8 @@ def _mock_blu_rtv_device(version: str | None = None):
            }
        ),
        xmod_info={},
        wifi_setconfig=AsyncMock(return_value={}),
        ble_setconfig=AsyncMock(return_value={}),
    )
    type(device).name = PropertyMock(return_value="Test name")
    return device

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1186,7 +1186,7 @@ async def test_sub_device_area_from_main_device(

    # verify sub-devices have the same area as main device
    for relay_index in range(2):
        entity_id = f"switch.test_name_switch_{relay_index}"
        entity_id = f"switch.test_name_output_{relay_index}"
        assert hass.states.get(entity_id) is not None
        entry = entity_registry.async_get(entity_id)
        assert entry

@@ -45,7 +45,7 @@ async def test_shelly_2pm_gen3_no_relay_names(
    config_entry = await init_integration(hass, gen=3, model=MODEL_2PM_G3)

    # Relay 0 sub-device
    entity_id = "switch.test_name_switch_0"
    entity_id = "switch.test_name_output_0"

    state = hass.states.get(entity_id)
    assert state
@@ -55,9 +55,9 @@ async def test_shelly_2pm_gen3_no_relay_names(

    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.name == "Test name Switch 0"
    assert device_entry.name == "Test name Output 0"

    entity_id = "sensor.test_name_switch_0_power"
    entity_id = "sensor.test_name_output_0_power"

    state = hass.states.get(entity_id)
    assert state
@@ -67,10 +67,10 @@ async def test_shelly_2pm_gen3_no_relay_names(

    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.name == "Test name Switch 0"
    assert device_entry.name == "Test name Output 0"

    # Relay 1 sub-device
    entity_id = "switch.test_name_switch_1"
    entity_id = "switch.test_name_output_1"

    state = hass.states.get(entity_id)
    assert state
@@ -80,9 +80,9 @@ async def test_shelly_2pm_gen3_no_relay_names(

    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.name == "Test name Switch 1"
    assert device_entry.name == "Test name Output 1"

    entity_id = "sensor.test_name_switch_1_power"
    entity_id = "sensor.test_name_output_1_power"

    state = hass.states.get(entity_id)
    assert state
@@ -92,7 +92,7 @@ async def test_shelly_2pm_gen3_no_relay_names(

    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.name == "Test name Switch 1"
    assert device_entry.name == "Test name Output 1"

    # Main device
    entity_id = "update.test_name_firmware"

@@ -34,6 +34,7 @@ from homeassistant.components.shelly.utils import (
    get_release_url,
    get_rpc_channel_name,
    get_rpc_input_triggers,
    get_rpc_sub_device_name,
    is_block_momentary_input,
    mac_address_from_name,
)
@@ -319,3 +320,53 @@ async def test_shelly_receiver_get() -> None:

    ws_server.websocket_handler.assert_awaited_once_with(mock_request)
    assert response == "test_response"


@pytest.mark.parametrize(
    ("key", "expected"),
    [
        ("switch:0", "Test name Output 0"),
        ("switch:1", "Test name Output 1"),
        ("cover:0", "Test name Cover 0"),
        ("light:0", "Test name Light 0"),
        ("rgb:0", "Test name RGB light 0"),
        ("rgbw:1", "Test name RGBW light 1"),
        ("cct:0", "Test name CCT light 0"),
        ("em1:0", "Test name Energy Meter 0"),
    ],
)
async def test_get_rpc_sub_device_name(
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
    key: str,
    expected: str,
) -> None:
    """Test get RPC sub-device name."""
    # Ensure the key has no custom name set
    config = {key: {"name": None}}
    monkeypatch.setattr(mock_rpc_device, "config", config)

    assert get_rpc_sub_device_name(mock_rpc_device, key) == expected


async def test_get_rpc_sub_device_name_with_custom_name(
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Test get RPC sub-device name with custom name."""
    config = {"switch:0": {"name": "My Custom Output"}}
    monkeypatch.setattr(mock_rpc_device, "config", config)

    assert get_rpc_sub_device_name(mock_rpc_device, "switch:0") == "My Custom Output"


async def test_get_rpc_sub_device_name_with_emeter_phase(
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Test get RPC sub-device name with emeter phase."""
    config = {"em:0": {"name": None}}
    monkeypatch.setattr(mock_rpc_device, "config", config)

    assert get_rpc_sub_device_name(mock_rpc_device, "em:0", "A") == "Test name Phase A"
    assert get_rpc_sub_device_name(mock_rpc_device, "em:0", "B") == "Test name Phase B"

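A minimal sketch of the naming behaviour these tests exercise, with the label table inferred from the parametrize cases above (illustrative only; the helper name below is hypothetical, and the real get_rpc_sub_device_name in homeassistant.components.shelly.utils may be implemented differently):

# Sketch inferred from the test expectations above; not the actual Shelly helper.
_COMPONENT_LABELS = {
    "switch": "Output",
    "cover": "Cover",
    "light": "Light",
    "rgb": "RGB light",
    "rgbw": "RGBW light",
    "cct": "CCT light",
    "em1": "Energy Meter",
}


def sketch_rpc_sub_device_name(device, key: str, phase: str | None = None) -> str:
    """Build "<device name> <label> <index>" unless a custom name is configured."""
    custom_name = (device.config.get(key) or {}).get("name")
    if custom_name:
        return custom_name
    component, index = key.split(":", maxsplit=1)
    if phase is not None:
        # e.g. key "em:0" with phase "A" -> "Test name Phase A"
        return f"{device.name} Phase {phase}"
    label = _COMPONENT_LABELS.get(component, component.capitalize())
    return f"{device.name} {label} {index}"
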
@@ -15,7 +15,7 @@
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'light',
    'entity_category': None,
    'entity_category': <EntityCategory.CONFIG: 'config'>,
    'entity_id': 'light.device_with_led_led',
    'has_entity_name': True,
    'hidden_by': None,

@@ -2,7 +2,7 @@

from __future__ import annotations

from collections.abc import Callable
from collections.abc import Callable, Generator
from datetime import datetime, timedelta
from functools import partial
from ipaddress import IPv4Address
@@ -32,7 +32,15 @@ from uiprotect.data import (
from uiprotect.websocket import WebsocketState

from homeassistant.components.unifiprotect.const import DOMAIN
from homeassistant.const import CONF_API_KEY
from homeassistant.components.unifiprotect.utils import _async_unifi_mac_from_hass
from homeassistant.const import (
    CONF_API_KEY,
    CONF_HOST,
    CONF_PASSWORD,
    CONF_PORT,
    CONF_USERNAME,
    CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util

@@ -43,6 +51,14 @@ from tests.common import MockConfigEntry, load_fixture

MAC_ADDR = "aa:bb:cc:dd:ee:ff"

# Common test data constants
DEFAULT_HOST = "1.1.1.1"
DEFAULT_PORT = 443
DEFAULT_VERIFY_SSL = False
DEFAULT_USERNAME = "test-username"
DEFAULT_PASSWORD = "test-password"
DEFAULT_API_KEY = "test-api-key"


@pytest.fixture(name="nvr")
def mock_nvr():
@@ -66,13 +82,13 @@ def mock_ufp_config_entry():
    return MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.1.1.1",
            "username": "test-username",
            "password": "test-password",
            CONF_API_KEY: "test-api-key",
            CONF_HOST: DEFAULT_HOST,
            CONF_USERNAME: DEFAULT_USERNAME,
            CONF_PASSWORD: DEFAULT_PASSWORD,
            CONF_API_KEY: DEFAULT_API_KEY,
            "id": "UnifiProtect",
            "port": 443,
            "verify_ssl": False,
            CONF_PORT: DEFAULT_PORT,
            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
        },
        version=2,
    )
@@ -371,6 +387,78 @@ def fixed_now_fixture():
    return dt_util.utcnow()


@pytest.fixture(name="ufp_reauth_entry")
def mock_ufp_reauth_entry():
    """Mock the unifiprotect config entry for reauth and reconfigure tests."""
    return MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_HOST: DEFAULT_HOST,
            CONF_USERNAME: DEFAULT_USERNAME,
            CONF_PASSWORD: DEFAULT_PASSWORD,
            CONF_API_KEY: DEFAULT_API_KEY,
            "id": "UnifiProtect",
            CONF_PORT: DEFAULT_PORT,
            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
        },
        unique_id=_async_unifi_mac_from_hass(MAC_ADDR),
    )


@pytest.fixture(name="ufp_reauth_entry_alt")
def mock_ufp_reauth_entry_alt():
    """Mock the unifiprotect config entry with alternate port/SSL for reauth/reconfigure tests."""
    return MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_HOST: DEFAULT_HOST,
            CONF_USERNAME: DEFAULT_USERNAME,
            CONF_PASSWORD: DEFAULT_PASSWORD,
            CONF_API_KEY: DEFAULT_API_KEY,
            "id": "UnifiProtect",
            CONF_PORT: 8443,
            CONF_VERIFY_SSL: True,
        },
        unique_id=_async_unifi_mac_from_hass(MAC_ADDR),
    )


@pytest.fixture(name="mock_setup")
def mock_setup_fixture() -> Generator[AsyncMock]:
    """Mock async_setup and async_setup_entry to prevent reload issues in tests."""
    with (
        patch(
            "homeassistant.components.unifiprotect.async_setup",
            return_value=True,
        ),
        patch(
            "homeassistant.components.unifiprotect.async_setup_entry",
            return_value=True,
        ) as mock,
    ):
        yield mock


@pytest.fixture(name="mock_api_bootstrap")
def mock_api_bootstrap_fixture(bootstrap: Bootstrap):
    """Mock the ProtectApiClient.get_bootstrap method."""
    with patch(
        "homeassistant.components.unifiprotect.config_flow.ProtectApiClient.get_bootstrap",
        return_value=bootstrap,
    ) as mock:
        yield mock


@pytest.fixture(name="mock_api_meta_info")
def mock_api_meta_info_fixture():
    """Mock the ProtectApiClient.get_meta_info method."""
    with patch(
        "homeassistant.components.unifiprotect.config_flow.ProtectApiClient.get_meta_info",
        return_value=None,
    ) as mock:
        yield mock


@pytest.fixture(name="cloud_account")
def cloud_account() -> CloudAccount:
    """Return UI Cloud Account."""

36
tests/components/unifiprotect/snapshots/test_init.ambr
Normal file
@@ -0,0 +1,36 @@
# serializer version: 1
# name: test_setup_creates_nvr_device
  DeviceRegistryEntrySnapshot({
    'area_id': None,
    'config_entries': <ANY>,
    'config_entries_subentries': <ANY>,
    'configuration_url': 'https://127.0.0.1',
    'connections': set({
      tuple(
        'mac',
        'a1:e0:0c:82:69:24',
      ),
    }),
    'disabled_by': None,
    'entry_type': None,
    'hw_version': None,
    'id': <ANY>,
    'identifiers': set({
      tuple(
        'unifiprotect',
        'A1E00C826924',
      ),
    }),
    'labels': set({
    }),
    'manufacturer': 'Ubiquiti',
    'model': 'UNVR-PRO',
    'model_id': None,
    'name': 'UnifiProtect',
    'name_by_user': None,
    'primary_config_entry': <ANY>,
    'serial_number': None,
    'sw_version': '6.0.0',
    'via_device_id': None,
  })
# ---
File diff suppressed because it is too large
@@ -5,6 +5,7 @@ from __future__ import annotations

from unittest.mock import AsyncMock, Mock, patch

import pytest
from syrupy.assertion import SnapshotAssertion
from uiprotect import NvrError, ProtectApiClient
from uiprotect.api import DEVICE_UPDATE_INTERVAL
from uiprotect.data import NVR, Bootstrap, CloudAccount, Light
@@ -44,6 +45,30 @@ def mock_user_can_write_nvr(request: pytest.FixtureRequest, ufp: MockUFPFixture)
    object.__setattr__(ufp.api.bootstrap.nvr, "can_write", original_can_write)


async def test_setup_creates_nvr_device(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    ufp: MockUFPFixture,
    snapshot: SnapshotAssertion,
) -> None:
    """Test that setup creates the NVR device before loading platforms.

    This ensures that via_device references from camera/sensor entities
    to the NVR device work correctly.
    """
    await hass.config_entries.async_setup(ufp.entry.entry_id)
    await hass.async_block_till_done()

    assert ufp.entry.state is ConfigEntryState.LOADED

    # Verify NVR device was created
    nvr = ufp.api.bootstrap.nvr
    nvr_device = device_registry.async_get_device(
        identifiers={(DOMAIN, nvr.mac)},
    )
    assert nvr_device == snapshot


async def test_setup(hass: HomeAssistant, ufp: MockUFPFixture) -> None:
    """Test working setup of unifiprotect entry."""

@@ -259,7 +259,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/0',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.35725.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.c4bf34f8-ad40-4af3-914e-a85e75a76bed',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.35725.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.c4bf34f8-ad40-4af3-914e-a85e75a76bed',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -270,7 +270,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/1',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.64736.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6491fb2f-52e7-4129-bcbd-d23a67117ae0',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.64736.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6491fb2f-52e7-4129-bcbd-d23a67117ae0',
|
||||
'title': 'BrandedKeyArt',
|
||||
}),
|
||||
dict({
|
||||
@@ -281,7 +281,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/2',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.55545.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.4c2daefb-fbf6-4b90-b392-bf8ecc39a92e',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.55545.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.4c2daefb-fbf6-4b90-b392-bf8ecc39a92e',
|
||||
'title': 'TitledHeroArt',
|
||||
}),
|
||||
dict({
|
||||
@@ -292,7 +292,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/3',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.22570.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.bf29284d-808a-4e4a-beaa-6621c9898d0e',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.22570.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.bf29284d-808a-4e4a-beaa-6621c9898d0e',
|
||||
'title': 'Poster',
|
||||
}),
|
||||
dict({
|
||||
@@ -303,7 +303,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/4',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.55545.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.4c2daefb-fbf6-4b90-b392-bf8ecc39a92e',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.55545.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.4c2daefb-fbf6-4b90-b392-bf8ecc39a92e',
|
||||
'title': 'SuperHeroArt',
|
||||
}),
|
||||
dict({
|
||||
@@ -314,7 +314,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/5',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.45451.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.3abf2cc3-00cc-417d-a93d-97110cdfb261',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.45451.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.3abf2cc3-00cc-417d-a93d-97110cdfb261',
|
||||
'title': 'BoxArt',
|
||||
}),
|
||||
dict({
|
||||
@@ -325,7 +325,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/6',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.35072.13670972585585116.70570f0d-17aa-4f97-b692-5412fa183673.25a97451-9369-4f6b-b66b-3427913235eb',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.35072.13670972585585116.70570f0d-17aa-4f97-b692-5412fa183673.25a97451-9369-4f6b-b66b-3427913235eb',
|
||||
'title': 'Logo',
|
||||
}),
|
||||
dict({
|
||||
@@ -336,7 +336,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/7',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.45451.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.3abf2cc3-00cc-417d-a93d-97110cdfb261',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.45451.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.3abf2cc3-00cc-417d-a93d-97110cdfb261',
|
||||
'title': 'FeaturePromotionalSquareArt',
|
||||
}),
|
||||
dict({
|
||||
@@ -347,7 +347,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/8',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.38628.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.c2a205af-5146-405b-b2b7-56845351f1f3',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.38628.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.c2a205af-5146-405b-b2b7-56845351f1f3',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -358,7 +358,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/9',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.22150.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.b147895c-e947-424d-a731-faefc8c9906a',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.22150.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.b147895c-e947-424d-a731-faefc8c9906a',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -369,7 +369,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/10',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.37559.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.479d2dc1-db2d-4ffa-8c54-a2bebb093ec6',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.37559.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.479d2dc1-db2d-4ffa-8c54-a2bebb093ec6',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -380,7 +380,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/11',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.32737.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6a16ae3e-2918-46e9-90d9-232c79cb9d9d',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.32737.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6a16ae3e-2918-46e9-90d9-232c79cb9d9d',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -391,7 +391,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/12',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.57046.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.0c0dd072-aa27-4e83-9010-474dfbb42277',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.57046.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.0c0dd072-aa27-4e83-9010-474dfbb42277',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -402,7 +402,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/13',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.19315.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6293a7b7-07ca-4df0-9eea-6018285a0a8d',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.19315.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6293a7b7-07ca-4df0-9eea-6018285a0a8d',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -413,7 +413,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/14',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.23374.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.66498a73-52f5-4247-a1e2-d3c84b9b315d',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.23374.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.66498a73-52f5-4247-a1e2-d3c84b9b315d',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -424,7 +424,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/15',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.64646.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.83182b76-4294-496d-90a7-f4e31e7aa80a',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.64646.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.83182b76-4294-496d-90a7-f4e31e7aa80a',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -435,7 +435,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/16',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.24470.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.72d2abc3-aa69-4aeb-960b-6f6d25f498e4',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.24470.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.72d2abc3-aa69-4aeb-960b-6f6d25f498e4',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -446,7 +446,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/17',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.15604.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.27cee011-660b-49a4-bd33-38db6fff5226',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.15604.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.27cee011-660b-49a4-bd33-38db6fff5226',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -457,7 +457,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/18',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.39987.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.be285efe-78f8-4984-9d28-9159881bacd4',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.39987.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.be285efe-78f8-4984-9d28-9159881bacd4',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -468,7 +468,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/19',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.38206.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.2409803d-7378-4a69-a10b-1574ac42b98b',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.38206.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.2409803d-7378-4a69-a10b-1574ac42b98b',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -479,7 +479,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/20',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.14938.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.ef6ee72c-4beb-45ec-bd10-6235bd6a7c7f',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.14938.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.ef6ee72c-4beb-45ec-bd10-6235bd6a7c7f',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -490,7 +490,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/21',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.12835.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6165ee24-df01-44f5-80fe-7411f9366d1c',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.12835.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6165ee24-df01-44f5-80fe-7411f9366d1c',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -501,7 +501,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/22',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.40786.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.b7607a0d-0101-4864-9bf8-ad889f820489',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.40786.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.b7607a0d-0101-4864-9bf8-ad889f820489',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
dict({
|
||||
@@ -512,7 +512,7 @@
|
||||
'media_class': <MediaClass.VIDEO: 'video'>,
|
||||
'media_content_id': 'media-source://xbox/271958441785640/1297287135/game_media/23',
|
||||
'media_content_type': <MediaClass.VIDEO: 'video'>,
|
||||
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.55686.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.ecbb0e91-36a9-4f76-ab1e-5a5de009840e',
|
||||
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.55686.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.ecbb0e91-36a9-4f76-ab1e-5a5de009840e',
|
||||
'title': 'Screenshot',
|
||||
}),
|
||||
]),
|
||||
|
||||