Mirror of https://github.com/home-assistant/core.git (synced 2025-07-30 00:27:19 +00:00)
Merge branch 'dev' into fix-microsign-alt1
commit cd61fc93a0
@@ -7,7 +7,7 @@
 "iot_class": "local_polling",
 "loggers": ["pyenphase"],
 "quality_scale": "platinum",
-"requirements": ["pyenphase==2.2.0"],
+"requirements": ["pyenphase==2.2.1"],
 "zeroconf": [
 {
 "type": "_enphase-envoy._tcp.local."
@@ -7,6 +7,7 @@ from typing import Final

 import voluptuous as vol

+from homeassistant.components.zone import condition as zone_condition
 from homeassistant.const import CONF_EVENT, CONF_PLATFORM, CONF_SOURCE, CONF_ZONE
 from homeassistant.core import (
 CALLBACK_TYPE,
@@ -17,7 +18,7 @@ from homeassistant.core import (
 State,
 callback,
 )
-from homeassistant.helpers import condition, config_validation as cv
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.config_validation import entity_domain
 from homeassistant.helpers.event import TrackStates, async_track_state_change_filtered
 from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
@@ -79,9 +80,11 @@ async def async_attach_trigger(
 return

 from_match = (
-condition.zone(hass, zone_state, from_state) if from_state else False
+zone_condition.zone(hass, zone_state, from_state) if from_state else False
+)
+to_match = (
+zone_condition.zone(hass, zone_state, to_state) if to_state else False
 )
-to_match = condition.zone(hass, zone_state, to_state) if to_state else False

 if (trigger_event == EVENT_ENTER and not from_match and to_match) or (
 trigger_event == EVENT_LEAVE and from_match and not to_match
@@ -7,5 +7,5 @@
 "integration_type": "service",
 "iot_class": "cloud_polling",
 "loggers": ["dacite", "gios"],
-"requirements": ["gios==6.0.0"]
+"requirements": ["gios==6.1.0"]
 }
@@ -80,10 +80,10 @@ async def async_send_text_commands(

 credentials = Credentials(session.token[CONF_ACCESS_TOKEN]) # type: ignore[no-untyped-call]
 language_code = entry.options.get(CONF_LANGUAGE_CODE, default_language_code(hass))
+command_response_list = []
 with TextAssistant(
 credentials, language_code, audio_out=bool(media_players)
 ) as assistant:
-command_response_list = []
 for command in commands:
 try:
 resp = await hass.async_add_executor_job(assistant.assist, command)
@@ -117,7 +117,7 @@ async def async_send_text_commands(
 blocking=True,
 )
 command_response_list.append(CommandResponse(text_response))
 return command_response_list


 def default_language_code(hass: HomeAssistant) -> str:
@@ -7,6 +7,6 @@
 "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
 "integration_type": "service",
 "iot_class": "cloud_polling",
-"requirements": ["gassist-text==0.0.12"],
+"requirements": ["gassist-text==0.0.14"],
 "single_config_entry": true
 }
@@ -44,11 +44,14 @@ from homeassistant.helpers.entity_component import async_update_entity
 from homeassistant.helpers.issue_registry import IssueSeverity
 from homeassistant.helpers.service import (
 async_extract_config_entry_ids,
-async_extract_referenced_entity_ids,
 async_register_admin_service,
 )
 from homeassistant.helpers.signal import KEY_HA_STOP
 from homeassistant.helpers.system_info import async_get_system_info
+from homeassistant.helpers.target import (
+TargetSelectorData,
+async_extract_referenced_entity_ids,
+)
 from homeassistant.helpers.template import async_load_custom_templates
 from homeassistant.helpers.typing import ConfigType

@@ -111,7 +114,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:

 async def async_handle_turn_service(service: ServiceCall) -> None:
 """Handle calls to homeassistant.turn_on/off."""
-referenced = async_extract_referenced_entity_ids(hass, service)
+referenced = async_extract_referenced_entity_ids(
+hass, TargetSelectorData(service.data)
+)
 all_referenced = referenced.referenced | referenced.indirectly_referenced

 # Generic turn on/off method requires entity id
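This hunk and several later ones in the commit share one pattern: service handlers stop passing the ServiceCall object to async_extract_referenced_entity_ids in homeassistant.helpers.service and instead call the relocated helper in homeassistant.helpers.target with the raw service data wrapped in TargetSelectorData. A minimal sketch of the new call shape, with a hypothetical handler name; the helper, the wrapper class, and the result attributes are taken from the hunks in this diff:

    from homeassistant.core import HomeAssistant, ServiceCall
    from homeassistant.helpers.target import (
        TargetSelectorData,
        async_extract_referenced_entity_ids,
    )

    async def async_handle_example(hass: HomeAssistant, service: ServiceCall) -> None:
        # Wrap the raw service data; the helper resolves the targeted entities.
        referenced = async_extract_referenced_entity_ids(
            hass, TargetSelectorData(service.data)
        )
        # Directly targeted entities plus those reached via devices, areas, or labels.
        all_entity_ids = referenced.referenced | referenced.indirectly_referenced
        # referenced.referenced_devices is used the same way in the HomeKit hunk below.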
@@ -75,11 +75,12 @@ from homeassistant.helpers.entityfilter import (
 EntityFilter,
 )
 from homeassistant.helpers.reload import async_integration_yaml_config
-from homeassistant.helpers.service import (
-async_extract_referenced_entity_ids,
-async_register_admin_service,
-)
+from homeassistant.helpers.service import async_register_admin_service
 from homeassistant.helpers.start import async_at_started
+from homeassistant.helpers.target import (
+TargetSelectorData,
+async_extract_referenced_entity_ids,
+)
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import IntegrationNotFound, async_get_integration
 from homeassistant.util.async_ import create_eager_task
@@ -482,7 +483,9 @@ def _async_register_events_and_services(hass: HomeAssistant) -> None:

 async def async_handle_homekit_unpair(service: ServiceCall) -> None:
 """Handle unpair HomeKit service call."""
-referenced = async_extract_referenced_entity_ids(hass, service)
+referenced = async_extract_referenced_entity_ids(
+hass, TargetSelectorData(service.data)
+)
 dev_reg = dr.async_get(hass)
 for device_id in referenced.referenced_devices:
 if not (dev_reg_ent := dev_reg.async_get(device_id)):
@@ -37,12 +37,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import (
-config_validation as cv,
-frame,
-issue_registry as ir,
-storage,
-)
+from homeassistant.helpers import config_validation as cv, issue_registry as ir, storage
 from homeassistant.helpers.http import (
 KEY_ALLOW_CONFIGURED_CORS,
 KEY_AUTHENTICATED, # noqa: F401
@@ -505,25 +500,6 @@ class HomeAssistantHTTP:
 )
 )

-def register_static_path(
-self, url_path: str, path: str, cache_headers: bool = True
-) -> None:
-"""Register a folder or file to serve as a static path."""
-frame.report_usage(
-"calls hass.http.register_static_path which "
-"does blocking I/O in the event loop, instead "
-"call `await hass.http.async_register_static_paths("
-f'[StaticPathConfig("{url_path}", "{path}", {cache_headers})])`',
-exclude_integrations={"http"},
-core_behavior=frame.ReportBehavior.ERROR,
-core_integration_behavior=frame.ReportBehavior.ERROR,
-custom_integration_behavior=frame.ReportBehavior.ERROR,
-breaks_in_ha_version="2025.7",
-)
-configs = [StaticPathConfig(url_path, path, cache_headers)]
-resources = self._make_static_resources(configs)
-self._async_register_static_paths(configs, resources)
-
 def _create_ssl_context(self) -> ssl.SSLContext | None:
 context: ssl.SSLContext | None = None
 assert self.ssl_certificate is not None
@@ -2,8 +2,8 @@
 "config": {
 "step": {
 "user": {
-"title": "Configure Iskra Device",
-"description": "Enter the IP address of your Iskra Device and select protocol.",
+"title": "Configure Iskra device",
+"description": "Enter the IP address of your Iskra device and select protocol.",
 "data": {
 "host": "[%key:common::config_flow::data::host%]"
 },
@@ -12,7 +12,7 @@
 }
 },
 "authentication": {
-"title": "Configure Rest API Credentials",
+"title": "Configure REST API credentials",
 "description": "Enter username and password",
 "data": {
 "username": "[%key:common::config_flow::data::username%]",
@@ -44,7 +44,7 @@
 "selector": {
 "protocol": {
 "options": {
-"rest_api": "Rest API",
+"rest_api": "REST API",
 "modbus_tcp": "Modbus TCP"
 }
 }
@@ -28,7 +28,10 @@ from homeassistant.components.light import (
 from homeassistant.const import ATTR_MODE
 from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.service import async_extract_referenced_entity_ids
+from homeassistant.helpers.target import (
+TargetSelectorData,
+async_extract_referenced_entity_ids,
+)

 from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DOMAIN
 from .coordinator import LIFXConfigEntry, LIFXUpdateCoordinator
@@ -268,7 +271,9 @@ class LIFXManager:

 async def service_handler(service: ServiceCall) -> None:
 """Apply a service, i.e. start an effect."""
-referenced = async_extract_referenced_entity_ids(self.hass, service)
+referenced = async_extract_referenced_entity_ids(
+self.hass, TargetSelectorData(service.data)
+)
 all_referenced = referenced.referenced | referenced.indirectly_referenced
 if all_referenced:
 await self.start_effect(all_referenced, service.service, **service.data)
@@ -499,6 +504,5 @@ class LIFXManager:
 if self.entry_id_to_entity_id[entry.entry_id] in entity_ids:
 coordinators.append(entry.runtime_data)
 bulbs.append(entry.runtime_data.device)
-
 if start_effect_func := self._effect_dispatch.get(service):
 await start_effect_func(self, bulbs, coordinators, **kwargs)
@@ -309,7 +309,7 @@ DISCOVERY_SCHEMAS = [
 platform=Platform.BINARY_SENSOR,
 entity_description=MatterBinarySensorEntityDescription(
 key="EnergyEvseSupplyStateSensor",
-translation_key="evse_supply_charging_state",
+translation_key="evse_supply_state",
 device_class=BinarySensorDeviceClass.RUNNING,
 device_to_ha={
 clusters.EnergyEvse.Enums.SupplyStateEnum.kDisabled: False,
@@ -83,8 +83,8 @@
 "evse_plug": {
 "name": "Plug state"
 },
-"evse_supply_charging_state": {
-"name": "Supply charging state"
+"evse_supply_state": {
+"name": "Charger supply state"
 },
 "boost_state": {
 "name": "Boost state"
@@ -10,12 +10,19 @@ from typing import Any

 from nibe.coil import Coil, CoilData
 from nibe.connection import Connection
-from nibe.exceptions import CoilNotFoundException, ReadException
+from nibe.exceptions import (
+CoilNotFoundException,
+ReadException,
+WriteDeniedException,
+WriteException,
+WriteTimeoutException,
+)
 from nibe.heatpump import HeatPump, Series
 from propcache.api import cached_property

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -134,7 +141,36 @@ class CoilCoordinator(ContextCoordinator[dict[int, CoilData], int]):
 async def async_write_coil(self, coil: Coil, value: float | str) -> None:
 """Write coil and update state."""
 data = CoilData(coil, value)
-await self.connection.write_coil(data)
+try:
+await self.connection.write_coil(data)
+except WriteDeniedException as e:
+raise HomeAssistantError(
+translation_domain=DOMAIN,
+translation_key="write_denied",
+translation_placeholders={
+"address": str(coil.address),
+"value": str(value),
+},
+) from e
+except WriteTimeoutException as e:
+raise HomeAssistantError(
+translation_domain=DOMAIN,
+translation_key="write_timeout",
+translation_placeholders={
+"address": str(coil.address),
+},
+) from e
+except WriteException as e:
+LOGGER.debug("Failed to write", exc_info=True)
+raise HomeAssistantError(
+translation_domain=DOMAIN,
+translation_key="write_failed",
+translation_placeholders={
+"address": str(coil.address),
+"value": str(value),
+"error": str(e),
+},
+) from e

 self.data[coil.address] = data

@@ -45,5 +45,16 @@
 "unknown": "[%key:common::config_flow::error::unknown%]",
 "url": "The specified URL is not well formed nor supported"
 }
+},
+"exceptions": {
+"write_denied": {
+"message": "Writing of coil {address} with value `{value}` was denied"
+},
+"write_timeout": {
+"message": "Timeout while writing coil {address}"
+},
+"write_failed": {
+"message": "Writing of coil {address} with value `{value}` failed with error `{error}`"
+}
 }
 }
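The strings hunk above pairs with the coordinator hunk earlier in the commit: each key added under "exceptions" is referenced by a HomeAssistantError raised in async_write_coil, and the placeholders fill the {address}, {value}, and {error} slots of the message templates. A rough, self-contained sketch of that pairing; the DOMAIN value here is an assumption, the diff only shows the DOMAIN symbol:

    from homeassistant.exceptions import HomeAssistantError

    DOMAIN = "nibe_heatpump"  # assumed for this sketch

    def raise_write_denied(address: int, value: float | str) -> None:
        # translation_key selects the "write_denied" template under "exceptions"
        # in strings.json; translation_placeholders fill {address} and {value}.
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="write_denied",
            translation_placeholders={"address": str(address), "value": str(value)},
        )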
@@ -8,5 +8,5 @@
 "documentation": "https://www.home-assistant.io/integrations/openai_conversation",
 "integration_type": "service",
 "iot_class": "cloud_polling",
-"requirements": ["openai==1.76.2"]
+"requirements": ["openai==1.93.0"]
 }
@@ -115,6 +115,16 @@ class RestData:
 for key, value in rendered_params.items():
 if isinstance(value, bool):
 rendered_params[key] = str(value).lower()
+elif not isinstance(value, (str, int, float, type(None))):
+# For backward compatibility with httpx behavior, convert non-primitive
+# types to strings. This maintains compatibility after switching from
+# httpx to aiohttp. See https://github.com/home-assistant/core/issues/148153
+_LOGGER.debug(
+"REST query parameter '%s' has type %s, converting to string",
+key,
+type(value).__name__,
+)
+rendered_params[key] = str(value)

 _LOGGER.debug("Updating from %s", self._resource)
 # Create request kwargs
@@ -140,7 +150,14 @@ class RestData:
 self._method, self._resource, **request_kwargs
 ) as response:
 # Read the response
-self.data = await response.text(encoding=self._encoding)
+# Only use configured encoding if no charset in Content-Type header
+# If charset is present in Content-Type, let aiohttp use it
+if response.charset:
+# Let aiohttp use the charset from Content-Type header
+self.data = await response.text()
+else:
+# Use configured encoding as fallback
+self.data = await response.text(encoding=self._encoding)
 self.headers = response.headers

 except TimeoutError as ex:
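The charset branch above leans on aiohttp behavior: ClientResponse.charset reports the charset parsed from the Content-Type header, and text() without an encoding argument decodes with it. A small illustrative sketch of the same fallback outside the RestData class, assuming an already-created aiohttp session:

    import aiohttp

    async def fetch_text(
        session: aiohttp.ClientSession, url: str, fallback_encoding: str | None
    ) -> str:
        async with session.get(url) as response:
            if response.charset:
                # Content-Type carried a charset; let aiohttp decode with it.
                return await response.text()
            # No charset in the header; fall back to the configured encoding.
            return await response.text(encoding=fallback_encoding)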
@@ -6,5 +6,5 @@
 "documentation": "https://www.home-assistant.io/integrations/sharkiq",
 "iot_class": "cloud_polling",
 "loggers": ["sharkiq"],
-"requirements": ["sharkiq==1.1.0"]
+"requirements": ["sharkiq==1.1.1"]
 }
@@ -41,5 +41,5 @@
 "iot_class": "local_push",
 "loggers": ["switchbot"],
 "quality_scale": "gold",
-"requirements": ["PySwitchbot==0.67.0"]
+"requirements": ["PySwitchbot==0.68.1"]
 }
@@ -26,7 +26,10 @@ from homeassistant.helpers import (
 device_registry as dr,
 entity_registry as er,
 )
-from homeassistant.helpers.service import async_extract_referenced_entity_ids
+from homeassistant.helpers.target import (
+TargetSelectorData,
+async_extract_referenced_entity_ids,
+)
 from homeassistant.util.json import JsonValueType
 from homeassistant.util.read_only_dict import ReadOnlyDict

@@ -115,7 +118,7 @@ def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiCl

 @callback
 def _async_get_ufp_camera(call: ServiceCall) -> Camera:
-ref = async_extract_referenced_entity_ids(call.hass, call)
+ref = async_extract_referenced_entity_ids(call.hass, TargetSelectorData(call.data))
 entity_registry = er.async_get(call.hass)

 entity_id = ref.indirectly_referenced.pop()
@@ -133,7 +136,7 @@ def _async_get_protect_from_call(call: ServiceCall) -> set[ProtectApiClient]:
 return {
 _async_get_ufp_instance(call.hass, device_id)
 for device_id in async_extract_referenced_entity_ids(
-call.hass, call
+call.hass, TargetSelectorData(call.data)
 ).referenced_devices
 }

@@ -196,7 +199,7 @@ def _async_unique_id_to_mac(unique_id: str) -> str:

 async def set_chime_paired_doorbells(call: ServiceCall) -> None:
 """Set paired doorbells on chime."""
-ref = async_extract_referenced_entity_ids(call.hass, call)
+ref = async_extract_referenced_entity_ids(call.hass, TargetSelectorData(call.data))
 entity_registry = er.async_get(call.hass)

 entity_id = ref.indirectly_referenced.pop()
@@ -211,7 +214,9 @@ async def set_chime_paired_doorbells(call: ServiceCall) -> None:
 assert chime is not None

 call.data = ReadOnlyDict(call.data.get("doorbells") or {})
-doorbell_refs = async_extract_referenced_entity_ids(call.hass, call)
+doorbell_refs = async_extract_referenced_entity_ids(
+call.hass, TargetSelectorData(call.data)
+)
 doorbell_ids: set[str] = set()
 for camera_id in doorbell_refs.referenced | doorbell_refs.indirectly_referenced:
 doorbell_sensor = entity_registry.async_get(camera_id)
@@ -321,16 +321,18 @@ class StateVacuumEntity(

 Integrations should implement a sensor instead.
 """
-report_usage(
-f"is setting the {property} which has been deprecated."
-f" Integration {self.platform.platform_name} should implement a sensor"
-" instead with a correct device class and link it to the same device",
-core_integration_behavior=ReportBehavior.LOG,
-custom_integration_behavior=ReportBehavior.LOG,
-breaks_in_ha_version="2026.8",
-integration_domain=self.platform.platform_name if self.platform else None,
-exclude_integrations={DOMAIN},
-)
+if self.platform:
+# Don't report usage until after entity added to hass, after init
+report_usage(
+f"is setting the {property} which has been deprecated."
+f" Integration {self.platform.platform_name} should implement a sensor"
+" instead with a correct device class and link it to the same device",
+core_integration_behavior=ReportBehavior.LOG,
+custom_integration_behavior=ReportBehavior.LOG,
+breaks_in_ha_version="2026.8",
+integration_domain=self.platform.platform_name,
+exclude_integrations={DOMAIN},
+)

 @callback
 def _report_deprecated_battery_feature(self) -> None:
@@ -339,17 +341,19 @@ class StateVacuumEntity(
 Integrations should remove the battery supported feature when migrating
 battery level and icon to a sensor.
 """
-report_usage(
-f"is setting the battery supported feature which has been deprecated."
-f" Integration {self.platform.platform_name} should remove this as part of migrating"
-" the battery level and icon to a sensor",
-core_behavior=ReportBehavior.LOG,
-core_integration_behavior=ReportBehavior.LOG,
-custom_integration_behavior=ReportBehavior.LOG,
-breaks_in_ha_version="2026.8",
-integration_domain=self.platform.platform_name if self.platform else None,
-exclude_integrations={DOMAIN},
-)
+if self.platform:
+# Don't report usage until after entity added to hass, after init
+report_usage(
+f"is setting the battery supported feature which has been deprecated."
+f" Integration {self.platform.platform_name} should remove this as part of migrating"
+" the battery level and icon to a sensor",
+core_behavior=ReportBehavior.LOG,
+core_integration_behavior=ReportBehavior.LOG,
+custom_integration_behavior=ReportBehavior.LOG,
+breaks_in_ha_version="2026.8",
+integration_domain=self.platform.platform_name,
+exclude_integrations={DOMAIN},
+)

 @cached_property
 def battery_level(self) -> int | None:
@@ -9,7 +9,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed

-from .const import DOMAIN, UPDATE_INTERVAL
+from .const import UPDATE_INTERVAL
 from .coordinator import InvalidAuth, WallboxCoordinator, async_validate_input

 PLATFORMS = [
@@ -20,8 +20,10 @@ PLATFORMS = [
 Platform.SWITCH,
 ]

+type WallboxConfigEntry = ConfigEntry[WallboxCoordinator]
+

-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: WallboxConfigEntry) -> bool:
 """Set up Wallbox from a config entry."""
 wallbox = Wallbox(
 entry.data[CONF_USERNAME],
@@ -36,7 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 wallbox_coordinator = WallboxCoordinator(hass, entry, wallbox)
 await wallbox_coordinator.async_config_entry_first_refresh()

-hass.data.setdefault(DOMAIN, {})[entry.entry_id] = wallbox_coordinator
+entry.runtime_data = wallbox_coordinator

 await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -45,8 +47,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 """Unload a config entry."""
-unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-if unload_ok:
-hass.data[DOMAIN].pop(entry.entry_id)
-
-return unload_ok
+return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
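The Wallbox hunks above and below replace the hass.data[DOMAIN][entry.entry_id] store with entry.runtime_data and a typed config-entry alias, so platforms read the coordinator straight off the entry and unload no longer has to pop anything. A condensed sketch of the pattern, using a stand-in coordinator class purely for illustration:

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant

    class WallboxCoordinator:  # stand-in for the real coordinator in this sketch
        pass

    type WallboxConfigEntry = ConfigEntry[WallboxCoordinator]

    async def async_setup_entry(hass: HomeAssistant, entry: WallboxConfigEntry) -> bool:
        coordinator = WallboxCoordinator()
        # Replaces hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
        entry.runtime_data = coordinator
        return True

    # Platform setup then reads it back as: coordinator = entry.runtime_data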
@@ -222,7 +222,9 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
 return data # noqa: TRY300
 except requests.exceptions.HTTPError as wallbox_connection_error:
 if wallbox_connection_error.response.status_code == 403:
-raise InvalidAuth from wallbox_connection_error
+raise InvalidAuth(
+translation_domain=DOMAIN, translation_key="invalid_auth"
+) from wallbox_connection_error
 if wallbox_connection_error.response.status_code == 429:
 raise HomeAssistantError(
 translation_domain=DOMAIN, translation_key="too_many_requests"
@@ -248,7 +250,9 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
 return data # noqa: TRY300
 except requests.exceptions.HTTPError as wallbox_connection_error:
 if wallbox_connection_error.response.status_code == 403:
-raise InvalidAuth from wallbox_connection_error
+raise InvalidAuth(
+translation_domain=DOMAIN, translation_key="invalid_auth"
+) from wallbox_connection_error
 if wallbox_connection_error.response.status_code == 429:
 raise HomeAssistantError(
 translation_domain=DOMAIN, translation_key="too_many_requests"
@@ -303,7 +307,9 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
 return data # noqa: TRY300
 except requests.exceptions.HTTPError as wallbox_connection_error:
 if wallbox_connection_error.response.status_code == 403:
-raise InvalidAuth from wallbox_connection_error
+raise InvalidAuth(
+translation_domain=DOMAIN, translation_key="invalid_auth"
+) from wallbox_connection_error
 if wallbox_connection_error.response.status_code == 429:
 raise HomeAssistantError(
 translation_domain=DOMAIN, translation_key="too_many_requests"
|
@@ -13,7 +13,6 @@ from .const import (
 CHARGER_DATA_KEY,
 CHARGER_LOCKED_UNLOCKED_KEY,
 CHARGER_SERIAL_NUMBER_KEY,
-DOMAIN,
 )
 from .coordinator import WallboxCoordinator
 from .entity import WallboxEntity
@@ -32,7 +31,7 @@ async def async_setup_entry(
 async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
 """Create wallbox lock entities in HASS."""
-coordinator: WallboxCoordinator = hass.data[DOMAIN][entry.entry_id]
+coordinator: WallboxCoordinator = entry.runtime_data
 async_add_entities(
 WallboxLock(coordinator, description)
 for ent in coordinator.data
@@ -40,6 +39,10 @@ async def async_setup_entry(
 )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class WallboxLock(WallboxEntity, LockEntity):
 """Representation of a wallbox lock."""

@@ -23,7 +23,6 @@ from .const import (
 CHARGER_MAX_ICP_CURRENT_KEY,
 CHARGER_PART_NUMBER_KEY,
 CHARGER_SERIAL_NUMBER_KEY,
-DOMAIN,
 )
 from .coordinator import WallboxCoordinator
 from .entity import WallboxEntity
@@ -84,7 +83,7 @@ async def async_setup_entry(
 async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
 """Create wallbox number entities in HASS."""
-coordinator: WallboxCoordinator = hass.data[DOMAIN][entry.entry_id]
+coordinator: WallboxCoordinator = entry.runtime_data
 async_add_entities(
 WallboxNumber(coordinator, entry, description)
 for ent in coordinator.data
@@ -92,6 +91,10 @@ async def async_setup_entry(
 )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class WallboxNumber(WallboxEntity, NumberEntity):
 """Representation of the Wallbox portal."""

@@ -62,7 +62,7 @@ async def async_setup_entry(
 async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
 """Create wallbox select entities in HASS."""
-coordinator: WallboxCoordinator = hass.data[DOMAIN][entry.entry_id]
+coordinator: WallboxCoordinator = entry.runtime_data
 if coordinator.data[CHARGER_ECO_SMART_KEY] != EcoSmartMode.DISABLED:
 async_add_entities(
 WallboxSelect(coordinator, description)
@@ -74,6 +74,10 @@ async def async_setup_entry(
 )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class WallboxSelect(WallboxEntity, SelectEntity):
 """Representation of the Wallbox portal."""

@@ -43,7 +43,6 @@ from .const import (
 CHARGER_SERIAL_NUMBER_KEY,
 CHARGER_STATE_OF_CHARGE_KEY,
 CHARGER_STATUS_DESCRIPTION_KEY,
-DOMAIN,
 )
 from .coordinator import WallboxCoordinator
 from .entity import WallboxEntity
@@ -174,7 +173,7 @@ async def async_setup_entry(
 async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
 """Create wallbox sensor entities in HASS."""
-coordinator: WallboxCoordinator = hass.data[DOMAIN][entry.entry_id]
+coordinator: WallboxCoordinator = entry.runtime_data

 async_add_entities(
 WallboxSensor(coordinator, description)
@@ -183,6 +182,10 @@ async def async_setup_entry(
 )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class WallboxSensor(WallboxEntity, SensorEntity):
 """Representation of the Wallbox portal."""

@@ -3,9 +3,14 @@
 "step": {
 "user": {
 "data": {
-"station": "Station Serial Number",
+"station": "Station serial number",
 "username": "[%key:common::config_flow::data::username%]",
 "password": "[%key:common::config_flow::data::password%]"
+},
+"data_description": {
+"station": "Serial number of the charger. Can be found in the Wallbox app or in the Wallbox portal.",
+"username": "Username for your Wallbox account.",
+"password": "Password for your Wallbox account."
 }
 },
 "reauth_confirm": {
@@ -19,7 +24,7 @@
 "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
 "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
 "unknown": "[%key:common::config_flow::error::unknown%]",
-"reauth_invalid": "Re-authentication failed; Serial Number does not match original"
+"reauth_invalid": "Re-authentication failed; serial number does not match original"
 },
 "abort": {
 "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
@@ -115,6 +120,9 @@
 },
 "too_many_requests": {
 "message": "Error communicating with Wallbox API, too many requests"
+},
+"invalid_auth": {
+"message": "Invalid authentication"
 }
 }
 }
@@ -14,7 +14,6 @@ from .const import (
 CHARGER_PAUSE_RESUME_KEY,
 CHARGER_SERIAL_NUMBER_KEY,
 CHARGER_STATUS_DESCRIPTION_KEY,
-DOMAIN,
 ChargerStatus,
 )
 from .coordinator import WallboxCoordinator
@@ -34,12 +33,16 @@ async def async_setup_entry(
 async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
 """Create wallbox sensor entities in HASS."""
-coordinator: WallboxCoordinator = hass.data[DOMAIN][entry.entry_id]
+coordinator: WallboxCoordinator = entry.runtime_data
 async_add_entities(
 [WallboxSwitch(coordinator, SWITCH_TYPES[CHARGER_PAUSE_RESUME_KEY])]
 )


+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
 class WallboxSwitch(WallboxEntity, SwitchEntity):
 """Representation of the Wallbox portal."""

@@ -98,7 +98,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
 data = {CONF_HOST: self._host, CONF_CLIENT_SECRET: client.client_key}

 if not self._name:
-self._name = f"{DEFAULT_NAME} {client.tv_info.system['modelName']}"
+if model_name := client.tv_info.system.get("modelName"):
+self._name = f"{DEFAULT_NAME} {model_name}"
+else:
+self._name = DEFAULT_NAME
 return self.async_create_entry(title=self._name, data=data)

 return self.async_show_form(step_id="pairing", errors=errors)
@@ -6,7 +6,7 @@
 "documentation": "https://www.home-assistant.io/integrations/webostv",
 "iot_class": "local_push",
 "loggers": ["aiowebostv"],
-"requirements": ["aiowebostv==0.7.3"],
+"requirements": ["aiowebostv==0.7.4"],
 "ssdp": [
 {
 "st": "urn:lge-com:service:webos-second-screen:1"
@@ -28,7 +28,7 @@
 "step": {
 "init": {
 "data": {
-"keep_master_light": "Keep main light, even with 1 LED segment."
+"keep_master_light": "Add 'Main' control even with single LED segment"
 }
 }
 }
homeassistant/components/zone/condition.py (new file, 156 lines)
@@ -0,0 +1,156 @@
+"""Offer zone automation rules."""
+
+from __future__ import annotations
+
+import voluptuous as vol
+
+from homeassistant.const import (
+ATTR_GPS_ACCURACY,
+ATTR_LATITUDE,
+ATTR_LONGITUDE,
+CONF_CONDITION,
+CONF_ENTITY_ID,
+CONF_ZONE,
+STATE_UNAVAILABLE,
+STATE_UNKNOWN,
+)
+from homeassistant.core import HomeAssistant, State
+from homeassistant.exceptions import ConditionErrorContainer, ConditionErrorMessage
+from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.condition import (
+Condition,
+ConditionCheckerType,
+trace_condition_function,
+)
+from homeassistant.helpers.typing import ConfigType, TemplateVarsType
+
+from . import in_zone
+
+_CONDITION_SCHEMA = vol.Schema(
+{
+**cv.CONDITION_BASE_SCHEMA,
+vol.Required(CONF_CONDITION): "zone",
+vol.Required(CONF_ENTITY_ID): cv.entity_ids,
+vol.Required("zone"): cv.entity_ids,
+# To support use_trigger_value in automation
+# Deprecated 2016/04/25
+vol.Optional("event"): vol.Any("enter", "leave"),
+}
+)
+
+
+def zone(
+hass: HomeAssistant,
+zone_ent: str | State | None,
+entity: str | State | None,
+) -> bool:
+"""Test if zone-condition matches.
+
+Async friendly.
+"""
+if zone_ent is None:
+raise ConditionErrorMessage("zone", "no zone specified")
+
+if isinstance(zone_ent, str):
+zone_ent_id = zone_ent
+
+if (zone_ent := hass.states.get(zone_ent)) is None:
+raise ConditionErrorMessage("zone", f"unknown zone {zone_ent_id}")
+
+if entity is None:
+raise ConditionErrorMessage("zone", "no entity specified")
+
+if isinstance(entity, str):
+entity_id = entity
+
+if (entity := hass.states.get(entity)) is None:
+raise ConditionErrorMessage("zone", f"unknown entity {entity_id}")
+else:
+entity_id = entity.entity_id
+
+if entity.state in (
+STATE_UNAVAILABLE,
+STATE_UNKNOWN,
+):
+return False
+
+latitude = entity.attributes.get(ATTR_LATITUDE)
+longitude = entity.attributes.get(ATTR_LONGITUDE)
+
+if latitude is None:
+raise ConditionErrorMessage(
+"zone", f"entity {entity_id} has no 'latitude' attribute"
+)
+
+if longitude is None:
+raise ConditionErrorMessage(
+"zone", f"entity {entity_id} has no 'longitude' attribute"
+)
+
+return in_zone(
+zone_ent, latitude, longitude, entity.attributes.get(ATTR_GPS_ACCURACY, 0)
+)
+
+
+class ZoneCondition(Condition):
+"""Zone condition."""
+
+def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
+"""Initialize condition."""
+self._config = config
+
+@classmethod
+async def async_validate_condition_config(
+cls, hass: HomeAssistant, config: ConfigType
+) -> ConfigType:
+"""Validate config."""
+return _CONDITION_SCHEMA(config) # type: ignore[no-any-return]
+
+async def async_condition_from_config(self) -> ConditionCheckerType:
+"""Wrap action method with zone based condition."""
+entity_ids = self._config.get(CONF_ENTITY_ID, [])
+zone_entity_ids = self._config.get(CONF_ZONE, [])
+
+@trace_condition_function
+def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
+"""Test if condition."""
+errors = []
+
+all_ok = True
+for entity_id in entity_ids:
+entity_ok = False
+for zone_entity_id in zone_entity_ids:
+try:
+if zone(hass, zone_entity_id, entity_id):
+entity_ok = True
+except ConditionErrorMessage as ex:
+errors.append(
+ConditionErrorMessage(
+"zone",
+(
+f"error matching {entity_id} with {zone_entity_id}:"
+f" {ex.message}"
+),
+)
+)
+
+if not entity_ok:
+all_ok = False
+
+# Raise the errors only if no definitive result was found
+if errors and not all_ok:
+raise ConditionErrorContainer("zone", errors=errors)
+
+return all_ok
+
+return if_in_zone
+
+
+CONDITIONS: dict[str, type[Condition]] = {
+"zone": ZoneCondition,
+}
+
+
+async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
+"""Return the sun conditions."""
+return CONDITIONS
@@ -22,7 +22,6 @@ from homeassistant.core import (
 callback,
 )
 from homeassistant.helpers import (
-condition,
 config_validation as cv,
 entity_registry as er,
 location,
@@ -31,6 +30,8 @@ from homeassistant.helpers.event import async_track_state_change_event
 from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
 from homeassistant.helpers.typing import ConfigType

+from . import condition
+
 EVENT_ENTER = "enter"
 EVENT_LEAVE = "leave"
 DEFAULT_EVENT = EVENT_ENTER
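The trigger hunks in this commit call the zone check through its new home, homeassistant.components.zone.condition, instead of homeassistant.helpers.condition. A small hedged example of calling the relocated helper directly; the entity IDs are illustrative only:

    from homeassistant.components.zone import condition as zone_condition
    from homeassistant.core import HomeAssistant

    def is_tracker_home(hass: HomeAssistant) -> bool:
        # Accepts entity IDs or State objects; raises ConditionErrorMessage for
        # unknown entities and returns False for unavailable/unknown trackers.
        return zone_condition.zone(hass, "zone.home", "device_tracker.example_phone")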
@@ -18,9 +18,6 @@ import voluptuous as vol

 from homeassistant.const import (
 ATTR_DEVICE_CLASS,
-ATTR_GPS_ACCURACY,
-ATTR_LATITUDE,
-ATTR_LONGITUDE,
 CONF_ABOVE,
 CONF_AFTER,
 CONF_ATTRIBUTE,
@@ -36,7 +33,6 @@ from homeassistant.const import (
 CONF_STATE,
 CONF_VALUE_TEMPLATE,
 CONF_WEEKDAY,
-CONF_ZONE,
 ENTITY_MATCH_ALL,
 ENTITY_MATCH_ANY,
 STATE_UNAVAILABLE,
@@ -95,7 +91,6 @@ _PLATFORM_ALIASES: dict[str | None, str | None] = {
 "template": None,
 "time": None,
 "trigger": None,
-"zone": None,
 }

 INPUT_ENTITY_ID = re.compile(
@@ -919,101 +914,6 @@ def time_from_config(config: ConfigType) -> ConditionCheckerType:
 return time_if


-def zone(
-hass: HomeAssistant,
-zone_ent: str | State | None,
-entity: str | State | None,
-) -> bool:
-"""Test if zone-condition matches.
-
-Async friendly.
-"""
-from homeassistant.components import zone as zone_cmp # noqa: PLC0415
-
-if zone_ent is None:
-raise ConditionErrorMessage("zone", "no zone specified")
-
-if isinstance(zone_ent, str):
-zone_ent_id = zone_ent
-
-if (zone_ent := hass.states.get(zone_ent)) is None:
-raise ConditionErrorMessage("zone", f"unknown zone {zone_ent_id}")
-
-if entity is None:
-raise ConditionErrorMessage("zone", "no entity specified")
-
-if isinstance(entity, str):
-entity_id = entity
-
-if (entity := hass.states.get(entity)) is None:
-raise ConditionErrorMessage("zone", f"unknown entity {entity_id}")
-else:
-entity_id = entity.entity_id
-
-if entity.state in (
-STATE_UNAVAILABLE,
-STATE_UNKNOWN,
-):
-return False
-
-latitude = entity.attributes.get(ATTR_LATITUDE)
-longitude = entity.attributes.get(ATTR_LONGITUDE)
-
-if latitude is None:
-raise ConditionErrorMessage(
-"zone", f"entity {entity_id} has no 'latitude' attribute"
-)
-
-if longitude is None:
-raise ConditionErrorMessage(
-"zone", f"entity {entity_id} has no 'longitude' attribute"
-)
-
-return zone_cmp.in_zone(
-zone_ent, latitude, longitude, entity.attributes.get(ATTR_GPS_ACCURACY, 0)
-)
-
-
-def zone_from_config(config: ConfigType) -> ConditionCheckerType:
-"""Wrap action method with zone based condition."""
-entity_ids = config.get(CONF_ENTITY_ID, [])
-zone_entity_ids = config.get(CONF_ZONE, [])
-
-@trace_condition_function
-def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
-"""Test if condition."""
-errors = []
-
-all_ok = True
-for entity_id in entity_ids:
-entity_ok = False
-for zone_entity_id in zone_entity_ids:
-try:
-if zone(hass, zone_entity_id, entity_id):
-entity_ok = True
-except ConditionErrorMessage as ex:
-errors.append(
-ConditionErrorMessage(
-"zone",
-(
-f"error matching {entity_id} with {zone_entity_id}:"
-f" {ex.message}"
-),
-)
-)
-
-if not entity_ok:
-all_ok = False
-
-# Raise the errors only if no definitive result was found
-if errors and not all_ok:
-raise ConditionErrorContainer("zone", errors=errors)
-
-return all_ok
-
-return if_in_zone
-
-
 async def async_trigger_from_config(
 hass: HomeAssistant, config: ConfigType
 ) -> ConditionCheckerType:
@@ -1570,18 +1570,6 @@ TRIGGER_CONDITION_SCHEMA = vol.Schema(
 }
 )

-ZONE_CONDITION_SCHEMA = vol.Schema(
-{
-**CONDITION_BASE_SCHEMA,
-vol.Required(CONF_CONDITION): "zone",
-vol.Required(CONF_ENTITY_ID): entity_ids,
-vol.Required("zone"): entity_ids,
-# To support use_trigger_value in automation
-# Deprecated 2016/04/25
-vol.Optional("event"): vol.Any("enter", "leave"),
-}
-)
-
 AND_CONDITION_SCHEMA = vol.Schema(
 {
 **CONDITION_BASE_SCHEMA,
@ -1729,7 +1717,6 @@ BUILT_IN_CONDITIONS: ValueSchemas = {
|
|||||||
"template": TEMPLATE_CONDITION_SCHEMA,
|
"template": TEMPLATE_CONDITION_SCHEMA,
|
||||||
"time": TIME_CONDITION_SCHEMA,
|
"time": TIME_CONDITION_SCHEMA,
|
||||||
"trigger": TRIGGER_CONDITION_SCHEMA,
|
"trigger": TRIGGER_CONDITION_SCHEMA,
|
||||||
"zone": ZONE_CONDITION_SCHEMA,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
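For reference, data accepted by the removed ZONE_CONDITION_SCHEMA has roughly the following shape; the dict below only illustrates the keys the schema validated (including the long-deprecated event key) and is not an API guarantee now that the schema lives with the zone integration.

# Illustrative config shape for the zone condition, matching the schema above.
zone_condition_config = {
    "condition": "zone",
    "entity_id": ["device_tracker.paulus"],
    "zone": ["zone.home"],
    # Deprecated 2016/04/25, kept only to support use_trigger_value.
    "event": "enter",
}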
@@ -9,17 +9,13 @@ from enum import Enum
 from functools import cache, partial
 import logging
 from types import ModuleType
-from typing import TYPE_CHECKING, Any, TypedDict, TypeGuard, cast
+from typing import TYPE_CHECKING, Any, TypedDict, cast, override
 
 import voluptuous as vol
 
 from homeassistant.auth.permissions.const import CAT_ENTITIES, POLICY_CONTROL
 from homeassistant.const import (
-    ATTR_AREA_ID,
-    ATTR_DEVICE_ID,
     ATTR_ENTITY_ID,
-    ATTR_FLOOR_ID,
-    ATTR_LABEL_ID,
     CONF_ACTION,
     CONF_ENTITY_ID,
     CONF_SERVICE_DATA,
@@ -54,16 +50,14 @@ from homeassistant.util.yaml import load_yaml_dict
 from homeassistant.util.yaml.loader import JSON_TYPE
 
 from . import (
-    area_registry,
     config_validation as cv,
     device_registry,
     entity_registry,
-    floor_registry,
-    label_registry,
+    target as target_helpers,
     template,
     translation,
 )
-from .group import expand_entity_ids
+from .deprecation import deprecated_class, deprecated_function
 from .selector import TargetSelector
 from .typing import ConfigType, TemplateVarsType, VolDictType, VolSchemaType
@@ -225,87 +219,31 @@ class ServiceParams(TypedDict):
     target: dict | None
 
 
-class ServiceTargetSelector:
+@deprecated_class(
+    "homeassistant.helpers.target.TargetSelectorData",
+    breaks_in_ha_version="2026.8",
+)
+class ServiceTargetSelector(target_helpers.TargetSelectorData):
     """Class to hold a target selector for a service."""
 
-    __slots__ = ("area_ids", "device_ids", "entity_ids", "floor_ids", "label_ids")
-
     def __init__(self, service_call: ServiceCall) -> None:
         """Extract ids from service call data."""
-        service_call_data = service_call.data
-        entity_ids: str | list | None = service_call_data.get(ATTR_ENTITY_ID)
-        device_ids: str | list | None = service_call_data.get(ATTR_DEVICE_ID)
-        area_ids: str | list | None = service_call_data.get(ATTR_AREA_ID)
-        floor_ids: str | list | None = service_call_data.get(ATTR_FLOOR_ID)
-        label_ids: str | list | None = service_call_data.get(ATTR_LABEL_ID)
-
-        self.entity_ids = (
-            set(cv.ensure_list(entity_ids)) if _has_match(entity_ids) else set()
-        )
-        self.device_ids = (
-            set(cv.ensure_list(device_ids)) if _has_match(device_ids) else set()
-        )
-        self.area_ids = set(cv.ensure_list(area_ids)) if _has_match(area_ids) else set()
-        self.floor_ids = (
-            set(cv.ensure_list(floor_ids)) if _has_match(floor_ids) else set()
-        )
-        self.label_ids = (
-            set(cv.ensure_list(label_ids)) if _has_match(label_ids) else set()
-        )
-
-    @property
-    def has_any_selector(self) -> bool:
-        """Determine if any selectors are present."""
-        return bool(
-            self.entity_ids
-            or self.device_ids
-            or self.area_ids
-            or self.floor_ids
-            or self.label_ids
-        )
+        super().__init__(service_call.data)
 
 
-@dataclasses.dataclass(slots=True)
-class SelectedEntities:
+@deprecated_class(
+    "homeassistant.helpers.target.SelectedEntities",
+    breaks_in_ha_version="2026.8",
+)
+class SelectedEntities(target_helpers.SelectedEntities):
     """Class to hold the selected entities."""
 
-    # Entities that were explicitly mentioned.
-    referenced: set[str] = dataclasses.field(default_factory=set)
-
-    # Entities that were referenced via device/area/floor/label ID.
-    # Should not trigger a warning when they don't exist.
-    indirectly_referenced: set[str] = dataclasses.field(default_factory=set)
-
-    # Referenced items that could not be found.
-    missing_devices: set[str] = dataclasses.field(default_factory=set)
-    missing_areas: set[str] = dataclasses.field(default_factory=set)
-    missing_floors: set[str] = dataclasses.field(default_factory=set)
-    missing_labels: set[str] = dataclasses.field(default_factory=set)
-
-    # Referenced devices
-    referenced_devices: set[str] = dataclasses.field(default_factory=set)
-    referenced_areas: set[str] = dataclasses.field(default_factory=set)
-
-    def log_missing(self, missing_entities: set[str]) -> None:
+    @override
+    def log_missing(
+        self, missing_entities: set[str], logger: logging.Logger | None = None
+    ) -> None:
         """Log about missing items."""
-        parts = []
-        for label, items in (
-            ("floors", self.missing_floors),
-            ("areas", self.missing_areas),
-            ("devices", self.missing_devices),
-            ("entities", missing_entities),
-            ("labels", self.missing_labels),
-        ):
-            if items:
-                parts.append(f"{label} {', '.join(sorted(items))}")
-
-        if not parts:
-            return
-
-        _LOGGER.warning(
-            "Referenced %s are missing or not currently available",
-            ", ".join(parts),
-        )
+        super().log_missing(missing_entities, logger or _LOGGER)
 
 
 @bind_hass
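The deprecation notices above point old call sites at homeassistant.helpers.target. A minimal migration sketch, assuming service_call is a ServiceCall handed to a service handler and hass is the usual HomeAssistant instance; only the helper calls themselves come from this diff.

import logging

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import target as target_helpers

_LOGGER = logging.getLogger(__name__)


def resolve_targets(hass: HomeAssistant, service_call: ServiceCall) -> set[str]:
    """Resolve targeted entity IDs the way the deprecation notes suggest."""
    # Old style (still accepted until 2026.8, but warns):
    #     selector = ServiceTargetSelector(service_call)
    # New style: wrap the raw call data and hand it to the target helper.
    selector_data = target_helpers.TargetSelectorData(service_call.data)
    selected = target_helpers.async_extract_referenced_entity_ids(
        hass, selector_data, expand_group=True
    )
    return selected.referenced | selected.indirectly_referenced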
@@ -466,7 +404,10 @@ async def async_extract_entities[_EntityT: Entity](
     if data_ent_id == ENTITY_MATCH_ALL:
         return [entity for entity in entities if entity.available]
 
-    referenced = async_extract_referenced_entity_ids(hass, service_call, expand_group)
+    selector_data = target_helpers.TargetSelectorData(service_call.data)
+    referenced = target_helpers.async_extract_referenced_entity_ids(
+        hass, selector_data, expand_group
+    )
     combined = referenced.referenced | referenced.indirectly_referenced
 
     found = []
@@ -482,7 +423,7 @@ async def async_extract_entities[_EntityT: Entity](
 
         found.append(entity)
 
-    referenced.log_missing(referenced.referenced & combined)
+    referenced.log_missing(referenced.referenced & combined, _LOGGER)
 
     return found
 
@@ -495,141 +436,27 @@ async def async_extract_entity_ids(
 
     Will convert group entity ids to the entity ids it represents.
     """
-    referenced = async_extract_referenced_entity_ids(hass, service_call, expand_group)
+    selector_data = target_helpers.TargetSelectorData(service_call.data)
+    referenced = target_helpers.async_extract_referenced_entity_ids(
+        hass, selector_data, expand_group
+    )
     return referenced.referenced | referenced.indirectly_referenced
 
 
-def _has_match(ids: str | list[str] | None) -> TypeGuard[str | list[str]]:
-    """Check if ids can match anything."""
-    return ids not in (None, ENTITY_MATCH_NONE)
-
-
+@deprecated_function(
+    "homeassistant.helpers.target.async_extract_referenced_entity_ids",
+    breaks_in_ha_version="2026.8",
+)
 @bind_hass
 def async_extract_referenced_entity_ids(
     hass: HomeAssistant, service_call: ServiceCall, expand_group: bool = True
 ) -> SelectedEntities:
     """Extract referenced entity IDs from a service call."""
-    selector = ServiceTargetSelector(service_call)
-    selected = SelectedEntities()
-
-    if not selector.has_any_selector:
-        return selected
-
-    entity_ids: set[str] | list[str] = selector.entity_ids
-    if expand_group:
-        entity_ids = expand_entity_ids(hass, entity_ids)
-
-    selected.referenced.update(entity_ids)
-
-    if (
-        not selector.device_ids
-        and not selector.area_ids
-        and not selector.floor_ids
-        and not selector.label_ids
-    ):
-        return selected
-
-    entities = entity_registry.async_get(hass).entities
-    dev_reg = device_registry.async_get(hass)
-    area_reg = area_registry.async_get(hass)
-
-    if selector.floor_ids:
-        floor_reg = floor_registry.async_get(hass)
-        for floor_id in selector.floor_ids:
-            if floor_id not in floor_reg.floors:
-                selected.missing_floors.add(floor_id)
-
-    for area_id in selector.area_ids:
-        if area_id not in area_reg.areas:
-            selected.missing_areas.add(area_id)
-
-    for device_id in selector.device_ids:
-        if device_id not in dev_reg.devices:
-            selected.missing_devices.add(device_id)
-
-    if selector.label_ids:
-        label_reg = label_registry.async_get(hass)
-        for label_id in selector.label_ids:
-            if label_id not in label_reg.labels:
-                selected.missing_labels.add(label_id)
-
-            for entity_entry in entities.get_entries_for_label(label_id):
-                if (
-                    entity_entry.entity_category is None
-                    and entity_entry.hidden_by is None
-                ):
-                    selected.indirectly_referenced.add(entity_entry.entity_id)
-
-            for device_entry in dev_reg.devices.get_devices_for_label(label_id):
-                selected.referenced_devices.add(device_entry.id)
-
-            for area_entry in area_reg.areas.get_areas_for_label(label_id):
-                selected.referenced_areas.add(area_entry.id)
-
-    # Find areas for targeted floors
-    if selector.floor_ids:
-        selected.referenced_areas.update(
-            area_entry.id
-            for floor_id in selector.floor_ids
-            for area_entry in area_reg.areas.get_areas_for_floor(floor_id)
-        )
-
-    selected.referenced_areas.update(selector.area_ids)
-    selected.referenced_devices.update(selector.device_ids)
-
-    if not selected.referenced_areas and not selected.referenced_devices:
-        return selected
-
-    # Add indirectly referenced by device
-    selected.indirectly_referenced.update(
-        entry.entity_id
-        for device_id in selected.referenced_devices
-        for entry in entities.get_entries_for_device_id(device_id)
-        # Do not add entities which are hidden or which are config
-        # or diagnostic entities.
-        if (entry.entity_category is None and entry.hidden_by is None)
+    selector_data = target_helpers.TargetSelectorData(service_call.data)
+    selected = target_helpers.async_extract_referenced_entity_ids(
+        hass, selector_data, expand_group
     )
-
-    # Find devices for targeted areas
-    referenced_devices_by_area: set[str] = set()
-    if selected.referenced_areas:
-        for area_id in selected.referenced_areas:
-            referenced_devices_by_area.update(
-                device_entry.id
-                for device_entry in dev_reg.devices.get_devices_for_area_id(area_id)
-            )
-    selected.referenced_devices.update(referenced_devices_by_area)
-
-    # Add indirectly referenced by area
-    selected.indirectly_referenced.update(
-        entry.entity_id
-        for area_id in selected.referenced_areas
-        # The entity's area matches a targeted area
-        for entry in entities.get_entries_for_area_id(area_id)
-        # Do not add entities which are hidden or which are config
-        # or diagnostic entities.
-        if entry.entity_category is None and entry.hidden_by is None
-    )
-    # Add indirectly referenced by area through device
-    selected.indirectly_referenced.update(
-        entry.entity_id
-        for device_id in referenced_devices_by_area
-        for entry in entities.get_entries_for_device_id(device_id)
-        # Do not add entities which are hidden or which are config
-        # or diagnostic entities.
-        if (
-            entry.entity_category is None
-            and entry.hidden_by is None
-            and (
-                # The entity's device matches a device referenced
-                # by an area and the entity
-                # has no explicitly set area
-                not entry.area_id
-            )
-        )
-    )
-
-    return selected
+    return SelectedEntities(**dataclasses.asdict(selected))
 
 
 @bind_hass
@@ -637,7 +464,10 @@ async def async_extract_config_entry_ids(
     hass: HomeAssistant, service_call: ServiceCall, expand_group: bool = True
 ) -> set[str]:
     """Extract referenced config entry ids from a service call."""
-    referenced = async_extract_referenced_entity_ids(hass, service_call, expand_group)
+    selector_data = target_helpers.TargetSelectorData(service_call.data)
+    referenced = target_helpers.async_extract_referenced_entity_ids(
+        hass, selector_data, expand_group
+    )
     ent_reg = entity_registry.async_get(hass)
     dev_reg = device_registry.async_get(hass)
     config_entry_ids: set[str] = set()
@@ -948,11 +778,14 @@ async def entity_service_call(
     target_all_entities = call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL
 
     if target_all_entities:
-        referenced: SelectedEntities | None = None
+        referenced: target_helpers.SelectedEntities | None = None
         all_referenced: set[str] | None = None
     else:
         # A set of entities we're trying to target.
-        referenced = async_extract_referenced_entity_ids(hass, call, True)
+        selector_data = target_helpers.TargetSelectorData(call.data)
+        referenced = target_helpers.async_extract_referenced_entity_ids(
+            hass, selector_data, True
+        )
         all_referenced = referenced.referenced | referenced.indirectly_referenced
 
     # If the service function is a string, we'll pass it the service call data
@@ -977,7 +810,7 @@ async def entity_service_call(
         missing = referenced.referenced.copy()
         for entity in entity_candidates:
             missing.discard(entity.entity_id)
-        referenced.log_missing(missing)
+        referenced.log_missing(missing, _LOGGER)
 
     entities: list[Entity] = []
     for entity in entity_candidates:
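log_missing now requires the caller to pass the logger explicitly, which is what the two call-site changes above do with _LOGGER. A small self-contained sketch under the assumption that selected came from the target helper; the "missing" computation here is illustrative, not the exact one used in entity_service_call.

import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers import target as target_helpers

_LOGGER = logging.getLogger(__name__)


def warn_about_missing(hass: HomeAssistant, selected: target_helpers.SelectedEntities) -> None:
    """Warn about explicitly targeted entities that have no state machine entry."""
    missing = {
        entity_id
        for entity_id in selected.referenced
        if hass.states.get(entity_id) is None
    }
    # The logger is now an explicit argument instead of the helper module's own.
    selected.log_missing(missing, _LOGGER)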
homeassistant/helpers/target.py (new file)
@@ -0,0 +1,240 @@
+"""Helpers for dealing with entity targets."""
+
+from __future__ import annotations
+
+import dataclasses
+from logging import Logger
+from typing import TypeGuard
+
+from homeassistant.const import (
+    ATTR_AREA_ID,
+    ATTR_DEVICE_ID,
+    ATTR_ENTITY_ID,
+    ATTR_FLOOR_ID,
+    ATTR_LABEL_ID,
+    ENTITY_MATCH_NONE,
+)
+from homeassistant.core import HomeAssistant
+
+from . import (
+    area_registry as ar,
+    config_validation as cv,
+    device_registry as dr,
+    entity_registry as er,
+    floor_registry as fr,
+    group,
+    label_registry as lr,
+)
+from .typing import ConfigType
+
+
+def _has_match(ids: str | list[str] | None) -> TypeGuard[str | list[str]]:
+    """Check if ids can match anything."""
+    return ids not in (None, ENTITY_MATCH_NONE)
+
+
+class TargetSelectorData:
+    """Class to hold data of target selector."""
+
+    __slots__ = ("area_ids", "device_ids", "entity_ids", "floor_ids", "label_ids")
+
+    def __init__(self, config: ConfigType) -> None:
+        """Extract ids from the config."""
+        entity_ids: str | list | None = config.get(ATTR_ENTITY_ID)
+        device_ids: str | list | None = config.get(ATTR_DEVICE_ID)
+        area_ids: str | list | None = config.get(ATTR_AREA_ID)
+        floor_ids: str | list | None = config.get(ATTR_FLOOR_ID)
+        label_ids: str | list | None = config.get(ATTR_LABEL_ID)
+
+        self.entity_ids = (
+            set(cv.ensure_list(entity_ids)) if _has_match(entity_ids) else set()
+        )
+        self.device_ids = (
+            set(cv.ensure_list(device_ids)) if _has_match(device_ids) else set()
+        )
+        self.area_ids = set(cv.ensure_list(area_ids)) if _has_match(area_ids) else set()
+        self.floor_ids = (
+            set(cv.ensure_list(floor_ids)) if _has_match(floor_ids) else set()
+        )
+        self.label_ids = (
+            set(cv.ensure_list(label_ids)) if _has_match(label_ids) else set()
+        )
+
+    @property
+    def has_any_selector(self) -> bool:
+        """Determine if any selectors are present."""
+        return bool(
+            self.entity_ids
+            or self.device_ids
+            or self.area_ids
+            or self.floor_ids
+            or self.label_ids
+        )
+
+
+@dataclasses.dataclass(slots=True)
+class SelectedEntities:
+    """Class to hold the selected entities."""
+
+    # Entities that were explicitly mentioned.
+    referenced: set[str] = dataclasses.field(default_factory=set)
+
+    # Entities that were referenced via device/area/floor/label ID.
+    # Should not trigger a warning when they don't exist.
+    indirectly_referenced: set[str] = dataclasses.field(default_factory=set)
+
+    # Referenced items that could not be found.
+    missing_devices: set[str] = dataclasses.field(default_factory=set)
+    missing_areas: set[str] = dataclasses.field(default_factory=set)
+    missing_floors: set[str] = dataclasses.field(default_factory=set)
+    missing_labels: set[str] = dataclasses.field(default_factory=set)
+
+    referenced_devices: set[str] = dataclasses.field(default_factory=set)
+    referenced_areas: set[str] = dataclasses.field(default_factory=set)
+
+    def log_missing(self, missing_entities: set[str], logger: Logger) -> None:
+        """Log about missing items."""
+        parts = []
+        for label, items in (
+            ("floors", self.missing_floors),
+            ("areas", self.missing_areas),
+            ("devices", self.missing_devices),
+            ("entities", missing_entities),
+            ("labels", self.missing_labels),
+        ):
+            if items:
+                parts.append(f"{label} {', '.join(sorted(items))}")
+
+        if not parts:
+            return
+
+        logger.warning(
+            "Referenced %s are missing or not currently available",
+            ", ".join(parts),
+        )
+
+
+def async_extract_referenced_entity_ids(
+    hass: HomeAssistant, selector_data: TargetSelectorData, expand_group: bool = True
+) -> SelectedEntities:
+    """Extract referenced entity IDs from a target selector."""
+    selected = SelectedEntities()
+
+    if not selector_data.has_any_selector:
+        return selected
+
+    entity_ids: set[str] | list[str] = selector_data.entity_ids
+    if expand_group:
+        entity_ids = group.expand_entity_ids(hass, entity_ids)
+
+    selected.referenced.update(entity_ids)
+
+    if (
+        not selector_data.device_ids
+        and not selector_data.area_ids
+        and not selector_data.floor_ids
+        and not selector_data.label_ids
+    ):
+        return selected
+
+    entities = er.async_get(hass).entities
+    dev_reg = dr.async_get(hass)
+    area_reg = ar.async_get(hass)
+
+    if selector_data.floor_ids:
+        floor_reg = fr.async_get(hass)
+        for floor_id in selector_data.floor_ids:
+            if floor_id not in floor_reg.floors:
+                selected.missing_floors.add(floor_id)
+
+    for area_id in selector_data.area_ids:
+        if area_id not in area_reg.areas:
+            selected.missing_areas.add(area_id)
+
+    for device_id in selector_data.device_ids:
+        if device_id not in dev_reg.devices:
+            selected.missing_devices.add(device_id)
+
+    if selector_data.label_ids:
+        label_reg = lr.async_get(hass)
+        for label_id in selector_data.label_ids:
+            if label_id not in label_reg.labels:
+                selected.missing_labels.add(label_id)
+
+            for entity_entry in entities.get_entries_for_label(label_id):
+                if (
+                    entity_entry.entity_category is None
+                    and entity_entry.hidden_by is None
+                ):
+                    selected.indirectly_referenced.add(entity_entry.entity_id)
+
+            for device_entry in dev_reg.devices.get_devices_for_label(label_id):
+                selected.referenced_devices.add(device_entry.id)
+
+            for area_entry in area_reg.areas.get_areas_for_label(label_id):
+                selected.referenced_areas.add(area_entry.id)
+
+    # Find areas for targeted floors
+    if selector_data.floor_ids:
+        selected.referenced_areas.update(
+            area_entry.id
+            for floor_id in selector_data.floor_ids
+            for area_entry in area_reg.areas.get_areas_for_floor(floor_id)
+        )
+
+    selected.referenced_areas.update(selector_data.area_ids)
+    selected.referenced_devices.update(selector_data.device_ids)
+
+    if not selected.referenced_areas and not selected.referenced_devices:
+        return selected
+
+    # Add indirectly referenced by device
+    selected.indirectly_referenced.update(
+        entry.entity_id
+        for device_id in selected.referenced_devices
+        for entry in entities.get_entries_for_device_id(device_id)
+        # Do not add entities which are hidden or which are config
+        # or diagnostic entities.
+        if (entry.entity_category is None and entry.hidden_by is None)
+    )
+
+    # Find devices for targeted areas
+    referenced_devices_by_area: set[str] = set()
+    if selected.referenced_areas:
+        for area_id in selected.referenced_areas:
+            referenced_devices_by_area.update(
+                device_entry.id
+                for device_entry in dev_reg.devices.get_devices_for_area_id(area_id)
+            )
+    selected.referenced_devices.update(referenced_devices_by_area)
+
+    # Add indirectly referenced by area
+    selected.indirectly_referenced.update(
+        entry.entity_id
+        for area_id in selected.referenced_areas
+        # The entity's area matches a targeted area
+        for entry in entities.get_entries_for_area_id(area_id)
+        # Do not add entities which are hidden or which are config
+        # or diagnostic entities.
+        if entry.entity_category is None and entry.hidden_by is None
+    )
+    # Add indirectly referenced by area through device
+    selected.indirectly_referenced.update(
+        entry.entity_id
+        for device_id in referenced_devices_by_area
+        for entry in entities.get_entries_for_device_id(device_id)
+        # Do not add entities which are hidden or which are config
+        # or diagnostic entities.
+        if (
+            entry.entity_category is None
+            and entry.hidden_by is None
+            and (
+                # The entity's device matches a device referenced
+                # by an area and the entity
+                # has no explicitly set area
+                not entry.area_id
+            )
+        )
+    )
+
+    return selected
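A minimal usage sketch of the new module from a hypothetical service handler; the handler name and logging are illustrative assumptions, only the target helper calls come from the file above.

import logging

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import target as target_helpers

_LOGGER = logging.getLogger(__name__)


async def async_handle_example_service(hass: HomeAssistant, call: ServiceCall) -> None:
    """Resolve and act on the entities targeted by a service call."""
    selector_data = target_helpers.TargetSelectorData(call.data)
    if not selector_data.has_any_selector:
        return  # nothing was targeted at all

    selected = target_helpers.async_extract_referenced_entity_ids(
        hass, selector_data, expand_group=True
    )
    for entity_id in selected.referenced | selected.indirectly_referenced:
        _LOGGER.debug("Would act on %s", entity_id)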
requirements_all.txt (generated)
@@ -84,7 +84,7 @@ PyQRCode==1.2.1
 PyRMVtransport==0.3.3
 
 # homeassistant.components.switchbot
-PySwitchbot==0.67.0
+PySwitchbot==0.68.1
 
 # homeassistant.components.switchmate
 PySwitchmate==0.5.1
@@ -435,7 +435,7 @@ aiowatttime==0.1.1
 aiowebdav2==0.4.6
 
 # homeassistant.components.webostv
-aiowebostv==0.7.3
+aiowebostv==0.7.4
 
 # homeassistant.components.withings
 aiowithings==3.1.6
@@ -989,7 +989,7 @@ gTTS==2.5.3
 gardena-bluetooth==1.6.0
 
 # homeassistant.components.google_assistant_sdk
-gassist-text==0.0.12
+gassist-text==0.0.14
 
 # homeassistant.components.google
 gcal-sync==7.1.0
@@ -1020,7 +1020,7 @@ georss-qld-bushfire-alert-client==0.8
 getmac==0.9.5
 
 # homeassistant.components.gios
-gios==6.0.0
+gios==6.1.0
 
 # homeassistant.components.gitter
 gitterpy==0.1.7
@@ -1597,7 +1597,7 @@ open-garage==0.2.0
 open-meteo==0.3.2
 
 # homeassistant.components.openai_conversation
-openai==1.76.2
+openai==1.93.0
 
 # homeassistant.components.openerz
 openerz-api==0.3.0
@@ -1962,7 +1962,7 @@ pyeiscp==0.0.7
 pyemoncms==0.1.1
 
 # homeassistant.components.enphase_envoy
-pyenphase==2.2.0
+pyenphase==2.2.1
 
 # homeassistant.components.envisalink
 pyenvisalink==4.7
@@ -2756,7 +2756,7 @@ sentry-sdk==1.45.1
 sfrbox-api==0.0.12
 
 # homeassistant.components.sharkiq
-sharkiq==1.1.0
+sharkiq==1.1.1
 
 # homeassistant.components.aquostv
 sharp_aquos_rc==0.3.2

requirements_test_all.txt (generated)
@@ -81,7 +81,7 @@ PyQRCode==1.2.1
 PyRMVtransport==0.3.3
 
 # homeassistant.components.switchbot
-PySwitchbot==0.67.0
+PySwitchbot==0.68.1
 
 # homeassistant.components.syncthru
 PySyncThru==0.8.0
@@ -417,7 +417,7 @@ aiowatttime==0.1.1
 aiowebdav2==0.4.6
 
 # homeassistant.components.webostv
-aiowebostv==0.7.3
+aiowebostv==0.7.4
 
 # homeassistant.components.withings
 aiowithings==3.1.6
@@ -859,7 +859,7 @@ gTTS==2.5.3
 gardena-bluetooth==1.6.0
 
 # homeassistant.components.google_assistant_sdk
-gassist-text==0.0.12
+gassist-text==0.0.14
 
 # homeassistant.components.google
 gcal-sync==7.1.0
@@ -890,7 +890,7 @@ georss-qld-bushfire-alert-client==0.8
 getmac==0.9.5
 
 # homeassistant.components.gios
-gios==6.0.0
+gios==6.1.0
 
 # homeassistant.components.glances
 glances-api==0.8.0
@@ -1365,7 +1365,7 @@ open-garage==0.2.0
 open-meteo==0.3.2
 
 # homeassistant.components.openai_conversation
-openai==1.76.2
+openai==1.93.0
 
 # homeassistant.components.openerz
 openerz-api==0.3.0
@@ -1637,7 +1637,7 @@ pyeiscp==0.0.7
 pyemoncms==0.1.1
 
 # homeassistant.components.enphase_envoy
-pyenphase==2.2.0
+pyenphase==2.2.1
 
 # homeassistant.components.everlights
 pyeverlights==0.1.0
@@ -2278,7 +2278,7 @@ sentry-sdk==1.45.1
 sfrbox-api==0.0.12
 
 # homeassistant.components.sharkiq
-sharkiq==1.1.0
+sharkiq==1.1.1
 
 # homeassistant.components.simplefin
 simplefin4py==0.0.18
@@ -54,6 +54,7 @@ CONDITIONS_SCHEMA = vol.Schema(
 NON_MIGRATED_INTEGRATIONS = {
     "device_automation",
     "sun",
+    "zone",
 }
 
 
@@ -1,16 +1,29 @@
 """Tests for GIOS."""
 
-import json
 from unittest.mock import patch
 
 from homeassistant.components.gios.const import DOMAIN
 from homeassistant.core import HomeAssistant
 
-from tests.common import MockConfigEntry, async_load_fixture
+from tests.common import (
+    MockConfigEntry,
+    async_load_json_array_fixture,
+    async_load_json_object_fixture,
+)
 
 STATIONS = [
-    {"id": 123, "stationName": "Test Name 1", "gegrLat": "99.99", "gegrLon": "88.88"},
-    {"id": 321, "stationName": "Test Name 2", "gegrLat": "77.77", "gegrLon": "66.66"},
+    {
+        "Identyfikator stacji": 123,
+        "Nazwa stacji": "Test Name 1",
+        "WGS84 φ N": "99.99",
+        "WGS84 λ E": "88.88",
+    },
+    {
+        "Identyfikator stacji": 321,
+        "Nazwa stacji": "Test Name 2",
+        "WGS84 φ N": "77.77",
+        "WGS84 λ E": "66.66",
+    },
 ]
@@ -26,13 +39,13 @@ async def init_integration(
         entry_id="86129426118ae32020417a53712d6eef",
     )
 
-    indexes = json.loads(await async_load_fixture(hass, "indexes.json", DOMAIN))
-    station = json.loads(await async_load_fixture(hass, "station.json", DOMAIN))
-    sensors = json.loads(await async_load_fixture(hass, "sensors.json", DOMAIN))
+    indexes = await async_load_json_object_fixture(hass, "indexes.json", DOMAIN)
+    station = await async_load_json_array_fixture(hass, "station.json", DOMAIN)
+    sensors = await async_load_json_object_fixture(hass, "sensors.json", DOMAIN)
     if incomplete_data:
-        indexes["stIndexLevel"]["indexLevelName"] = "foo"
-        sensors["pm10"]["values"][0]["value"] = None
-        sensors["pm10"]["values"][1]["value"] = None
+        indexes["AqIndex"] = "foo"
+        sensors["pm10"]["Lista danych pomiarowych"][0]["Wartość"] = None
+        sensors["pm10"]["Lista danych pomiarowych"][1]["Wartość"] = None
     if invalid_indexes:
         indexes = {}
 
@@ -1,29 +1,38 @@
 {
-  "id": 123,
-  "stCalcDate": "2020-07-31 15:10:17",
-  "stIndexLevel": { "id": 1, "indexLevelName": "Dobry" },
-  "stSourceDataDate": "2020-07-31 14:00:00",
-  "so2CalcDate": "2020-07-31 15:10:17",
-  "so2IndexLevel": { "id": 0, "indexLevelName": "Bardzo dobry" },
-  "so2SourceDataDate": "2020-07-31 14:00:00",
-  "no2CalcDate": 1596201017000,
-  "no2IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
-  "no2SourceDataDate": "2020-07-31 14:00:00",
-  "coCalcDate": "2020-07-31 15:10:17",
-  "coIndexLevel": { "id": 0, "indexLevelName": "Dobry" },
-  "coSourceDataDate": "2020-07-31 14:00:00",
-  "pm10CalcDate": "2020-07-31 15:10:17",
-  "pm10IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
-  "pm10SourceDataDate": "2020-07-31 14:00:00",
-  "pm25CalcDate": "2020-07-31 15:10:17",
-  "pm25IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
-  "pm25SourceDataDate": "2020-07-31 14:00:00",
-  "o3CalcDate": "2020-07-31 15:10:17",
-  "o3IndexLevel": { "id": 1, "indexLevelName": "Dobry" },
-  "o3SourceDataDate": "2020-07-31 14:00:00",
-  "c6h6CalcDate": "2020-07-31 15:10:17",
-  "c6h6IndexLevel": { "id": 0, "indexLevelName": "Bardzo dobry" },
-  "c6h6SourceDataDate": "2020-07-31 14:00:00",
-  "stIndexStatus": true,
-  "stIndexCrParam": "OZON"
+  "AqIndex": {
+    "Identyfikator stacji pomiarowej": 123,
+    "Data wykonania obliczeń indeksu": "2020-07-31 15:10:17",
+    "Nazwa kategorii indeksu": "Dobry",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika st": "2020-07-31 14:00:00",
+    "Data wykonania obliczeń indeksu dla wskaźnika SO2": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika SO2": 0,
+    "Nazwa kategorii indeksu dla wskażnika SO2": "Bardzo dobry",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika SO2": "2020-07-31 14:00:00",
+    "Data wykonania obliczeń indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
+    "Wartość indeksu dla wskaźnika NO2": 0,
+    "Nazwa kategorii indeksu dla wskażnika NO2": "Dobry",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika CO": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika CO": 0,
+    "Nazwa kategorii indeksu dla wskażnika CO": "Dobry",
+    "Data wykonania obliczeń indeksu dla wskaźnika CO": "2020-07-31 14:00:00",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM10": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika PM10": 0,
+    "Nazwa kategorii indeksu dla wskażnika PM10": "Dobry",
+    "Data wykonania obliczeń indeksu dla wskaźnika PM10": "2020-07-31 14:00:00",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM2.5": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika PM2.5": 0,
+    "Nazwa kategorii indeksu dla wskażnika PM2.5": "Dobry",
+    "Data wykonania obliczeń indeksu dla wskaźnika PM2.5": "2020-07-31 14:00:00",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika O3": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika O3": 1,
+    "Nazwa kategorii indeksu dla wskażnika O3": "Dobry",
+    "Data wykonania obliczeń indeksu dla wskaźnika O3": "2020-07-31 14:00:00",
+    "Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika C6H6": "2020-07-31 15:10:17",
+    "Wartość indeksu dla wskaźnika C6H6": 0,
+    "Nazwa kategorii indeksu dla wskażnika C6H6": "Bardzo dobry",
+    "Data wykonania obliczeń indeksu dla wskaźnika C6H6": "2020-07-31 14:00:00",
+    "Status indeksu ogólnego dla stacji pomiarowej": true,
+    "Kod zanieczyszczenia krytycznego": "OZON"
+  }
 }
@@ -1,51 +1,51 @@
 {
   "so2": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 4.35478 },
-      { "date": "2020-07-31 14:00:00", "value": 4.25478 },
-      { "date": "2020-07-31 13:00:00", "value": 4.34309 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 4.35478 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 4.25478 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 4.34309 }
     ]
   },
   "c6h6": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 0.23789 },
-      { "date": "2020-07-31 14:00:00", "value": 0.22789 },
-      { "date": "2020-07-31 13:00:00", "value": 0.21315 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 0.23789 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 0.22789 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 0.21315 }
     ]
   },
   "co": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 251.874 },
-      { "date": "2020-07-31 14:00:00", "value": 250.874 },
-      { "date": "2020-07-31 13:00:00", "value": 251.097 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 251.874 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 250.874 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 251.097 }
     ]
   },
   "no2": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 7.13411 },
-      { "date": "2020-07-31 14:00:00", "value": 7.33411 },
-      { "date": "2020-07-31 13:00:00", "value": 9.32578 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 7.13411 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 7.33411 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 9.32578 }
     ]
   },
   "o3": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 95.7768 },
-      { "date": "2020-07-31 14:00:00", "value": 93.7768 },
-      { "date": "2020-07-31 13:00:00", "value": 89.4232 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 95.7768 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 93.7768 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 89.4232 }
     ]
   },
   "pm2.5": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 4 },
-      { "date": "2020-07-31 14:00:00", "value": 4 },
-      { "date": "2020-07-31 13:00:00", "value": 5 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 4 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 4 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 5 }
     ]
   },
   "pm10": {
-    "values": [
-      { "date": "2020-07-31 15:00:00", "value": 16.8344 },
-      { "date": "2020-07-31 14:00:00", "value": 17.8344 },
-      { "date": "2020-07-31 13:00:00", "value": 20.8094 }
+    "Lista danych pomiarowych": [
+      { "Data": "2020-07-31 15:00:00", "Wartość": 16.8344 },
+      { "Data": "2020-07-31 14:00:00", "Wartość": 17.8344 },
+      { "Data": "2020-07-31 13:00:00", "Wartość": 20.8094 }
     ]
   }
 }
@@ -1,72 +1,58 @@
 [
   {
-    "id": 672,
-    "stationId": 117,
-    "param": {
-      "paramName": "dwutlenek siarki",
-      "paramFormula": "SO2",
-      "paramCode": "SO2",
-      "idParam": 1
-    }
+    "Identyfikator stanowiska": 672,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "dwutlenek siarki",
+    "Wskaźnik - wzór": "SO2",
+    "Wskaźnik - kod": "SO2",
+    "Id wskaźnika": 1
   },
   {
-    "id": 658,
-    "stationId": 117,
-    "param": {
-      "paramName": "benzen",
-      "paramFormula": "C6H6",
-      "paramCode": "C6H6",
-      "idParam": 10
-    }
+    "Identyfikator stanowiska": 658,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "benzen",
+    "Wskaźnik - wzór": "C6H6",
+    "Wskaźnik - kod": "C6H6",
+    "Id wskaźnika": 10
   },
   {
-    "id": 660,
-    "stationId": 117,
-    "param": {
-      "paramName": "tlenek węgla",
-      "paramFormula": "CO",
-      "paramCode": "CO",
-      "idParam": 8
-    }
+    "Identyfikator stanowiska": 660,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "tlenek węgla",
+    "Wskaźnik - wzór": "CO",
+    "Wskaźnik - kod": "CO",
+    "Id wskaźnika": 8
   },
   {
-    "id": 665,
-    "stationId": 117,
-    "param": {
-      "paramName": "dwutlenek azotu",
-      "paramFormula": "NO2",
-      "paramCode": "NO2",
-      "idParam": 6
-    }
+    "Identyfikator stanowiska": 665,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "dwutlenek azotu",
+    "Wskaźnik - wzór": "NO2",
+    "Wskaźnik - kod": "NO2",
+    "Id wskaźnika": 6
   },
   {
-    "id": 667,
-    "stationId": 117,
-    "param": {
-      "paramName": "ozon",
-      "paramFormula": "O3",
-      "paramCode": "O3",
-      "idParam": 5
-    }
+    "Identyfikator stanowiska": 667,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "ozon",
+    "Wskaźnik - wzór": "O3",
+    "Wskaźnik - kod": "O3",
+    "Id wskaźnika": 5
  },
   {
-    "id": 670,
-    "stationId": 117,
-    "param": {
-      "paramName": "pył zawieszony PM2.5",
-      "paramFormula": "PM2.5",
-      "paramCode": "PM2.5",
-      "idParam": 69
-    }
+    "Identyfikator stanowiska": 670,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "pył zawieszony PM2.5",
+    "Wskaźnik - wzór": "PM2.5",
+    "Wskaźnik - kod": "PM2.5",
+    "Id wskaźnika": 69
   },
   {
-    "id": 14395,
-    "stationId": 117,
-    "param": {
-      "paramName": "pył zawieszony PM10",
-      "paramFormula": "PM10",
-      "paramCode": "PM10",
-      "idParam": 3
-    }
+    "Identyfikator stanowiska": 14395,
+    "Identyfikator stacji": 117,
+    "Wskaźnik": "pył zawieszony PM10",
+    "Wskaźnik - wzór": "PM10",
+    "Wskaźnik - kod": "PM10",
+    "Id wskaźnika": 3
   }
 ]
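The fixture rewrite appears to track the payload format consumed by the bumped gios dependency, which uses Polish field names. The mapping below is inferred purely from the old and new fixtures in this commit, not from the gios library itself, and is meant only as a reading aid.

# Old fixture key -> new fixture key, as seen in the updated GIOS test data.
GIOS_FIELD_RENAMES = {
    "id": "Identyfikator stacji",          # station id in the stations list
    "stationName": "Nazwa stacji",
    "gegrLat": "WGS84 φ N",
    "gegrLon": "WGS84 λ E",
    "values": "Lista danych pomiarowych",  # per-sensor measurement list
    "date": "Data",
    "value": "Wartość",
    "paramName": "Wskaźnik",
    "paramFormula": "Wskaźnik - wzór",
    "paramCode": "Wskaźnik - kod",
    "idParam": "Id wskaźnika",
}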
@@ -42,12 +42,14 @@
       'name': 'carbon monoxide',
       'value': 251.874,
     }),
+    'no': None,
     'no2': dict({
       'id': 665,
       'index': 'good',
       'name': 'nitrogen dioxide',
       'value': 7.13411,
     }),
+    'nox': None,
     'o3': dict({
       'id': 667,
       'index': 'good',
@@ -18,10 +18,18 @@ from tests.common import MockConfigEntry
 
 
 @pytest.mark.parametrize(
-    "side_eff",
+    ("side_eff", "config_entry_state", "active_flows"),
     [
-        HomeeConnectionFailedException("connection timed out"),
-        HomeeAuthFailedException("wrong username or password"),
+        (
+            HomeeConnectionFailedException("connection timed out"),
+            ConfigEntryState.SETUP_RETRY,
+            [],
+        ),
+        (
+            HomeeAuthFailedException("wrong username or password"),
+            ConfigEntryState.SETUP_ERROR,
+            ["reauth"],
+        ),
     ],
 )
 async def test_connection_errors(
@@ -29,6 +37,8 @@ async def test_connection_errors(
     mock_homee: MagicMock,
     mock_config_entry: MockConfigEntry,
     side_eff: Exception,
+    config_entry_state: ConfigEntryState,
+    active_flows: list[str],
 ) -> None:
     """Test if connection errors on startup are handled correctly."""
     mock_homee.get_access_token.side_effect = side_eff
@@ -36,7 +46,11 @@ async def test_connection_errors(
 
     await hass.config_entries.async_setup(mock_config_entry.entry_id)
 
-    assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
+    assert mock_config_entry.state is config_entry_state
+
+    assert [
+        flow["context"]["source"] for flow in hass.config_entries.flow.async_progress()
+    ] == active_flows
 
 
 async def test_connection_listener(
@@ -522,24 +522,6 @@ async def test_logging(
     assert "GET /api/states/logging.entity" not in caplog.text
 
 
-async def test_register_static_paths(
-    hass: HomeAssistant,
-    hass_client: ClientSessionGenerator,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test registering a static path with old api."""
-    assert await async_setup_component(hass, "frontend", {})
-    path = str(Path(__file__).parent)
-
-    match_error = (
-        "Detected code that calls hass.http.register_static_path "
-        "which does blocking I/O in the event loop, instead call "
-        "`await hass.http.async_register_static_paths"
-    )
-    with pytest.raises(RuntimeError, match=match_error):
-        hass.http.register_static_path("/something", path)
-
-
 async def test_ssl_issue_if_no_urls_configured(
     hass: HomeAssistant,
     tmp_path: Path,
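The removed test exercised the legacy blocking hass.http.register_static_path and matched an error message that points at the async replacement. A hedged sketch of that replacement is below; treat the StaticPathConfig signature and the cache flag as assumptions rather than a verified API reference.

# Sketch only: register static assets without blocking the event loop.
from homeassistant.components.http import StaticPathConfig
from homeassistant.core import HomeAssistant


async def async_setup_static_assets(hass: HomeAssistant) -> None:
    """Serve a local directory at /something using the async API."""
    await hass.http.async_register_static_paths(
        [StaticPathConfig("/something", "/path/to/static_dir", cache_headers=True)]
    )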
@@ -685,7 +685,7 @@
     'state': 'on',
   })
 # ---
-# name: test_binary_sensors[silabs_evse_charging][binary_sensor.evse_supply_charging_state-entry]
+# name: test_binary_sensors[silabs_evse_charging][binary_sensor.evse_charger_supply_state-entry]
   EntityRegistryEntrySnapshot({
     'aliases': set({
     }),
@@ -698,7 +698,7 @@
     'disabled_by': None,
     'domain': 'binary_sensor',
     'entity_category': None,
-    'entity_id': 'binary_sensor.evse_supply_charging_state',
+    'entity_id': 'binary_sensor.evse_charger_supply_state',
     'has_entity_name': True,
     'hidden_by': None,
     'icon': None,
@@ -710,24 +710,24 @@
     }),
     'original_device_class': <BinarySensorDeviceClass.RUNNING: 'running'>,
     'original_icon': None,
-    'original_name': 'Supply charging state',
+    'original_name': 'Charger supply state',
     'platform': 'matter',
     'previous_unique_id': None,
     'suggested_object_id': None,
     'supported_features': 0,
-    'translation_key': 'evse_supply_charging_state',
+    'translation_key': 'evse_supply_state',
     'unique_id': '00000000000004D2-0000000000000017-MatterNodeDevice-1-EnergyEvseSupplyStateSensor-153-1',
     'unit_of_measurement': None,
   })
 # ---
-# name: test_binary_sensors[silabs_evse_charging][binary_sensor.evse_supply_charging_state-state]
+# name: test_binary_sensors[silabs_evse_charging][binary_sensor.evse_charger_supply_state-state]
   StateSnapshot({
     'attributes': ReadOnlyDict({
       'device_class': 'running',
-      'friendly_name': 'evse Supply charging state',
+      'friendly_name': 'evse Charger supply state',
     }),
     'context': <ANY>,
-    'entity_id': 'binary_sensor.evse_supply_charging_state',
+    'entity_id': 'binary_sensor.evse_charger_supply_state',
     'last_changed': <ANY>,
     'last_reported': <ANY>,
     'last_updated': <ANY>,
@@ -184,8 +184,8 @@ async def test_evse_sensor(
     assert state
     assert state.state == "off"
 
-    # Test SupplyStateEnum value with binary_sensor.evse_supply_charging
-    entity_id = "binary_sensor.evse_supply_charging_state"
+    # Test SupplyStateEnum value with binary_sensor.evse_charger_supply_state
+    entity_id = "binary_sensor.evse_charger_supply_state"
     state = hass.states.get(entity_id)
     assert state
     assert state.state == "on"
@@ -4,6 +4,7 @@ from typing import Any
 from unittest.mock import AsyncMock, patch
 
 from nibe.coil import CoilData
+from nibe.exceptions import WriteDeniedException, WriteException, WriteTimeoutException
 from nibe.heatpump import Model
 import pytest
 from syrupy.assertion import SnapshotAssertion
@@ -15,6 +16,7 @@ from homeassistant.components.number import (
 )
 from homeassistant.const import ATTR_ENTITY_ID, Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
 
 from . import async_add_model
 
@@ -108,3 +110,64 @@ async def test_set_value(
     assert isinstance(coil, CoilData)
     assert coil.coil.address == address
     assert coil.value == value
+
+
+@pytest.mark.parametrize(
+    ("exception", "translation_key", "translation_placeholders"),
+    [
+        (
+            WriteDeniedException("denied"),
+            "write_denied",
+            {"address": "47398", "value": "25.0"},
+        ),
+        (
+            WriteTimeoutException("timeout writing"),
+            "write_timeout",
+            {"address": "47398"},
+        ),
+        (
+            WriteException("failed"),
+            "write_failed",
+            {
+                "address": "47398",
+                "value": "25.0",
+                "error": "failed",
+            },
+        ),
+    ],
+)
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_set_value_fail(
+    hass: HomeAssistant,
+    mock_connection: AsyncMock,
+    exception: Exception,
+    translation_key: str,
+    translation_placeholders: dict[str, Any],
+    coils: dict[int, Any],
+) -> None:
+    """Test setting of value."""
+
+    value = 25
+    model = Model.F1155
+    address = 47398
+    entity_id = "number.room_sensor_setpoint_s1_47398"
+    coils[address] = 0
+
+    await async_add_model(hass, model)
+
+    await hass.async_block_till_done()
+    assert hass.states.get(entity_id)
+
+    mock_connection.write_coil.side_effect = exception
+
+    # Write value
+    with pytest.raises(HomeAssistantError) as exc_info:
+        await hass.services.async_call(
+            PLATFORM_DOMAIN,
+            SERVICE_SET_VALUE,
+            {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: value},
+            blocking=True,
+        )
+    assert exc_info.value.translation_domain == "nibe_heatpump"
+    assert exc_info.value.translation_key == translation_key
+    assert exc_info.value.translation_placeholders == translation_placeholders
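The new test expects write failures to surface as HomeAssistantError with translation metadata. A sketch of the kind of raise the nibe_heatpump entity presumably performs; the exact wrapping is not part of this diff, so the function below is illustrative only, and the translation strings themselves would live in the integration's strings.json.

from homeassistant.exceptions import HomeAssistantError


def _raise_write_denied(address: int, value: float) -> None:
    """Raise a translated error of the shape asserted in test_set_value_fail."""
    raise HomeAssistantError(
        translation_domain="nibe_heatpump",
        translation_key="write_denied",
        translation_placeholders={"address": str(address), "value": str(value)},
    )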
@@ -35,6 +35,7 @@ from openai.types.responses import (
     ResponseWebSearchCallSearchingEvent,
 )
 from openai.types.responses.response import IncompleteDetails
+from openai.types.responses.response_function_web_search import ActionSearch
 import pytest
 from syrupy.assertion import SnapshotAssertion
 
@@ -95,10 +96,12 @@ def mock_create_stream() -> Generator[AsyncMock]:
         )
         yield ResponseCreatedEvent(
             response=response,
+            sequence_number=0,
             type="response.created",
         )
         yield ResponseInProgressEvent(
             response=response,
+            sequence_number=0,
             type="response.in_progress",
         )
         response.status = "completed"
@@ -123,16 +126,19 @@ def mock_create_stream() -> Generator[AsyncMock]:
         if response.status == "incomplete":
             yield ResponseIncompleteEvent(
                 response=response,
+                sequence_number=0,
                 type="response.incomplete",
             )
         elif response.status == "failed":
             yield ResponseFailedEvent(
                 response=response,
+                sequence_number=0,
                 type="response.failed",
            )
         else:
             yield ResponseCompletedEvent(
                 response=response,
+                sequence_number=0,
                 type="response.completed",
             )
 
@@ -301,7 +307,7 @@ async def test_incomplete_response(
             "OpenAI response failed: Rate limit exceeded",
         ),
         (
-            ResponseErrorEvent(type="error", message="Some error"),
+            ResponseErrorEvent(type="error", message="Some error", sequence_number=0),
             "OpenAI response error: Some error",
         ),
     ],
@@ -359,6 +365,7 @@ def create_message_item(
             status="in_progress",
         ),
         output_index=output_index,
+        sequence_number=0,
        type="response.output_item.added",
     ),
     ResponseContentPartAddedEvent(
@@ -366,6 +373,7 @@ def create_message_item(
         item_id=id,
         output_index=output_index,
         part=content,
+        sequence_number=0,
         type="response.content_part.added",
     ),
 ]
@@ -377,6 +385,7 @@ def create_message_item(
             delta=delta,
             item_id=id,
             output_index=output_index,
+            sequence_number=0,
             type="response.output_text.delta",
         )
         for delta in text
@@ -389,6 +398,7 @@ def create_message_item(
         item_id=id,
         output_index=output_index,
         text="".join(text),
+        sequence_number=0,
         type="response.output_text.done",
     ),
     ResponseContentPartDoneEvent(
@ -396,6 +406,7 @@ def create_message_item(
|
|||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
part=content,
|
part=content,
|
||||||
|
sequence_number=0,
|
||||||
type="response.content_part.done",
|
type="response.content_part.done",
|
||||||
),
|
),
|
||||||
ResponseOutputItemDoneEvent(
|
ResponseOutputItemDoneEvent(
|
||||||
@ -407,6 +418,7 @@ def create_message_item(
|
|||||||
type="message",
|
type="message",
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.done",
|
type="response.output_item.done",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
@ -433,6 +445,7 @@ def create_function_tool_call_item(
|
|||||||
status="in_progress",
|
status="in_progress",
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.added",
|
type="response.output_item.added",
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
@ -442,6 +455,7 @@ def create_function_tool_call_item(
|
|||||||
delta=delta,
|
delta=delta,
|
||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.function_call_arguments.delta",
|
type="response.function_call_arguments.delta",
|
||||||
)
|
)
|
||||||
for delta in arguments
|
for delta in arguments
|
||||||
@ -452,6 +466,7 @@ def create_function_tool_call_item(
|
|||||||
arguments="".join(arguments),
|
arguments="".join(arguments),
|
||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.function_call_arguments.done",
|
type="response.function_call_arguments.done",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@ -467,6 +482,7 @@ def create_function_tool_call_item(
|
|||||||
status="completed",
|
status="completed",
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.done",
|
type="response.output_item.done",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@ -485,6 +501,7 @@ def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEven
|
|||||||
status=None,
|
status=None,
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.added",
|
type="response.output_item.added",
|
||||||
),
|
),
|
||||||
ResponseOutputItemDoneEvent(
|
ResponseOutputItemDoneEvent(
|
||||||
@ -495,6 +512,7 @@ def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEven
|
|||||||
status=None,
|
status=None,
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.done",
|
type="response.output_item.done",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
@ -505,31 +523,42 @@ def create_web_search_item(id: str, output_index: int) -> list[ResponseStreamEve
|
|||||||
return [
|
return [
|
||||||
ResponseOutputItemAddedEvent(
|
ResponseOutputItemAddedEvent(
|
||||||
item=ResponseFunctionWebSearch(
|
item=ResponseFunctionWebSearch(
|
||||||
id=id, status="in_progress", type="web_search_call"
|
id=id,
|
||||||
|
status="in_progress",
|
||||||
|
action=ActionSearch(query="query", type="search"),
|
||||||
|
type="web_search_call",
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.added",
|
type="response.output_item.added",
|
||||||
),
|
),
|
||||||
ResponseWebSearchCallInProgressEvent(
|
ResponseWebSearchCallInProgressEvent(
|
||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.web_search_call.in_progress",
|
type="response.web_search_call.in_progress",
|
||||||
),
|
),
|
||||||
ResponseWebSearchCallSearchingEvent(
|
ResponseWebSearchCallSearchingEvent(
|
||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.web_search_call.searching",
|
type="response.web_search_call.searching",
|
||||||
),
|
),
|
||||||
ResponseWebSearchCallCompletedEvent(
|
ResponseWebSearchCallCompletedEvent(
|
||||||
item_id=id,
|
item_id=id,
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.web_search_call.completed",
|
type="response.web_search_call.completed",
|
||||||
),
|
),
|
||||||
ResponseOutputItemDoneEvent(
|
ResponseOutputItemDoneEvent(
|
||||||
item=ResponseFunctionWebSearch(
|
item=ResponseFunctionWebSearch(
|
||||||
id=id, status="completed", type="web_search_call"
|
id=id,
|
||||||
|
status="completed",
|
||||||
|
action=ActionSearch(query="query", type="search"),
|
||||||
|
type="web_search_call",
|
||||||
),
|
),
|
||||||
output_index=output_index,
|
output_index=output_index,
|
||||||
|
sequence_number=0,
|
||||||
type="response.output_item.done",
|
type="response.output_item.done",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
@ -588,6 +617,7 @@ async def test_function_call(
|
|||||||
"id": "rs_A",
|
"id": "rs_A",
|
||||||
"summary": [],
|
"summary": [],
|
||||||
"type": "reasoning",
|
"type": "reasoning",
|
||||||
|
"encrypted_content": None,
|
||||||
}
|
}
|
||||||
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
|
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||||
# Don't test the prompt, as it's not deterministic
|
# Don't test the prompt, as it's not deterministic
|
||||||
|
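# A small, self-contained sketch of the fixture pattern above: mock stream
# events are queued per turn and replayed lazily, and the updated SDK models
# now require a sequence_number on every event (the tests simply pass 0).
# These dataclasses are stand-ins, not the real openai.types.responses models.
from collections.abc import Iterable, Iterator
from dataclasses import dataclass, field
from typing import Any


@dataclass
class MockStreamEvent:
    """Stand-in for a streamed response event."""

    type: str
    payload: dict[str, Any] = field(default_factory=dict)
    sequence_number: int = 0


def stream_events(events: Iterable[MockStreamEvent]) -> Iterator[MockStreamEvent]:
    """Yield queued events in order, stamping increasing sequence numbers."""
    for number, event in enumerate(events):
        event.sequence_number = number
        yield event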
@ -1,6 +1,7 @@
|
|||||||
"""The tests for the REST sensor platform."""
|
"""The tests for the REST sensor platform."""
|
||||||
|
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
|
import logging
|
||||||
import ssl
|
import ssl
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
@ -19,6 +20,14 @@ from homeassistant.const import (
|
|||||||
ATTR_DEVICE_CLASS,
|
ATTR_DEVICE_CLASS,
|
||||||
ATTR_ENTITY_ID,
|
ATTR_ENTITY_ID,
|
||||||
ATTR_UNIT_OF_MEASUREMENT,
|
ATTR_UNIT_OF_MEASUREMENT,
|
||||||
|
CONF_DEVICE_CLASS,
|
||||||
|
CONF_FORCE_UPDATE,
|
||||||
|
CONF_METHOD,
|
||||||
|
CONF_NAME,
|
||||||
|
CONF_PARAMS,
|
||||||
|
CONF_RESOURCE,
|
||||||
|
CONF_UNIT_OF_MEASUREMENT,
|
||||||
|
CONF_VALUE_TEMPLATE,
|
||||||
CONTENT_TYPE_JSON,
|
CONTENT_TYPE_JSON,
|
||||||
SERVICE_RELOAD,
|
SERVICE_RELOAD,
|
||||||
STATE_UNAVAILABLE,
|
STATE_UNAVAILABLE,
|
||||||
@ -162,6 +171,94 @@ async def test_setup_encoding(
|
|||||||
assert hass.states.get("sensor.mysensor").state == "tack själv"
|
assert hass.states.get("sensor.mysensor").state == "tack själv"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_setup_auto_encoding_from_content_type(
|
||||||
|
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||||
|
) -> None:
|
||||||
|
"""Test setup with encoding auto-detected from Content-Type header."""
|
||||||
|
# Test with ISO-8859-1 charset in Content-Type header
|
||||||
|
aioclient_mock.get(
|
||||||
|
"http://localhost",
|
||||||
|
status=HTTPStatus.OK,
|
||||||
|
content="Björk Guðmundsdóttir".encode("iso-8859-1"),
|
||||||
|
headers={"Content-Type": "text/plain; charset=iso-8859-1"},
|
||||||
|
)
|
||||||
|
assert await async_setup_component(
|
||||||
|
hass,
|
||||||
|
SENSOR_DOMAIN,
|
||||||
|
{
|
||||||
|
SENSOR_DOMAIN: {
|
||||||
|
"name": "mysensor",
|
||||||
|
# encoding defaults to UTF-8, but should be ignored when charset present
|
||||||
|
"platform": DOMAIN,
|
||||||
|
"resource": "http://localhost",
|
||||||
|
"method": "GET",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
|
||||||
|
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_setup_encoding_fallback_no_charset(
|
||||||
|
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||||
|
) -> None:
|
||||||
|
"""Test that configured encoding is used when no charset in Content-Type."""
|
||||||
|
# No charset in Content-Type header
|
||||||
|
aioclient_mock.get(
|
||||||
|
"http://localhost",
|
||||||
|
status=HTTPStatus.OK,
|
||||||
|
content="Björk Guðmundsdóttir".encode("iso-8859-1"),
|
||||||
|
headers={"Content-Type": "text/plain"}, # No charset!
|
||||||
|
)
|
||||||
|
assert await async_setup_component(
|
||||||
|
hass,
|
||||||
|
SENSOR_DOMAIN,
|
||||||
|
{
|
||||||
|
SENSOR_DOMAIN: {
|
||||||
|
"name": "mysensor",
|
||||||
|
"encoding": "iso-8859-1", # This will be used as fallback
|
||||||
|
"platform": DOMAIN,
|
||||||
|
"resource": "http://localhost",
|
||||||
|
"method": "GET",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
|
||||||
|
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_setup_charset_overrides_encoding_config(
|
||||||
|
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||||
|
) -> None:
|
||||||
|
"""Test that charset in Content-Type overrides configured encoding."""
|
||||||
|
# Server sends UTF-8 with correct charset header
|
||||||
|
aioclient_mock.get(
|
||||||
|
"http://localhost",
|
||||||
|
status=HTTPStatus.OK,
|
||||||
|
content="Björk Guðmundsdóttir".encode(),
|
||||||
|
headers={"Content-Type": "text/plain; charset=utf-8"},
|
||||||
|
)
|
||||||
|
assert await async_setup_component(
|
||||||
|
hass,
|
||||||
|
SENSOR_DOMAIN,
|
||||||
|
{
|
||||||
|
SENSOR_DOMAIN: {
|
||||||
|
"name": "mysensor",
|
||||||
|
"encoding": "iso-8859-1", # Config says ISO-8859-1, but charset=utf-8 should win
|
||||||
|
"platform": DOMAIN,
|
||||||
|
"resource": "http://localhost",
|
||||||
|
"method": "GET",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
|
||||||
|
# This should work because charset=utf-8 overrides the iso-8859-1 config
|
||||||
|
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("ssl_cipher_list", "ssl_cipher_list_expected"),
|
("ssl_cipher_list", "ssl_cipher_list_expected"),
|
||||||
[
|
[
|
||||||
@ -978,6 +1075,124 @@ async def test_update_with_failed_get(
|
|||||||
assert "Empty reply" in caplog.text
|
assert "Empty reply" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
async def test_query_param_dict_value(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
aioclient_mock: AiohttpClientMocker,
|
||||||
|
) -> None:
|
||||||
|
"""Test dict values in query params are handled for backward compatibility."""
|
||||||
|
# Mock response
|
||||||
|
aioclient_mock.post(
|
||||||
|
"https://www.envertecportal.com/ApiInverters/QueryTerminalReal",
|
||||||
|
status=HTTPStatus.OK,
|
||||||
|
json={"Data": {"QueryResults": [{"POWER": 1500}]}},
|
||||||
|
)
|
||||||
|
|
||||||
|
# This test checks that when template_complex processes a string that looks like
|
||||||
|
# a dict/list, it converts it to an actual dict/list, which then needs to be
|
||||||
|
# handled by our backward compatibility code
|
||||||
|
with caplog.at_level(logging.DEBUG, logger="homeassistant.components.rest.data"):
|
||||||
|
assert await async_setup_component(
|
||||||
|
hass,
|
||||||
|
DOMAIN,
|
||||||
|
{
|
||||||
|
DOMAIN: [
|
||||||
|
{
|
||||||
|
CONF_RESOURCE: (
|
||||||
|
"https://www.envertecportal.com/ApiInverters/"
|
||||||
|
"QueryTerminalReal"
|
||||||
|
),
|
||||||
|
CONF_METHOD: "POST",
|
||||||
|
CONF_PARAMS: {
|
||||||
|
"page": "1",
|
||||||
|
"perPage": "20",
|
||||||
|
"orderBy": "SN",
|
||||||
|
# When processed by template.render_complex, certain
|
||||||
|
# strings might be converted to dicts/lists if they
|
||||||
|
# look like JSON
|
||||||
|
"whereCondition": (
|
||||||
|
"{{ {'STATIONID': 'A6327A17797C1234'} }}"
|
||||||
|
), # Template that evaluates to dict
|
||||||
|
},
|
||||||
|
"sensor": [
|
||||||
|
{
|
||||||
|
CONF_NAME: "Solar MPPT1 Power",
|
||||||
|
CONF_VALUE_TEMPLATE: (
|
||||||
|
"{{ value_json.Data.QueryResults[0].POWER }}"
|
||||||
|
),
|
||||||
|
CONF_DEVICE_CLASS: "power",
|
||||||
|
CONF_UNIT_OF_MEASUREMENT: "W",
|
||||||
|
CONF_FORCE_UPDATE: True,
|
||||||
|
"state_class": "measurement",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
# The sensor should be created successfully with backward compatibility
|
||||||
|
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
|
||||||
|
state = hass.states.get("sensor.solar_mppt1_power")
|
||||||
|
assert state is not None
|
||||||
|
assert state.state == "1500"
|
||||||
|
|
||||||
|
# Check that a debug message was logged about the parameter conversion
|
||||||
|
assert "REST query parameter 'whereCondition' has type" in caplog.text
|
||||||
|
assert "converting to string" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
async def test_query_param_json_string_preserved(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
aioclient_mock: AiohttpClientMocker,
|
||||||
|
) -> None:
|
||||||
|
"""Test that JSON strings in query params are preserved and not converted to dicts."""
|
||||||
|
# Mock response
|
||||||
|
aioclient_mock.get(
|
||||||
|
"https://api.example.com/data",
|
||||||
|
status=HTTPStatus.OK,
|
||||||
|
json={"value": 42},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Config with JSON string (quoted) - should remain a string
|
||||||
|
assert await async_setup_component(
|
||||||
|
hass,
|
||||||
|
DOMAIN,
|
||||||
|
{
|
||||||
|
DOMAIN: [
|
||||||
|
{
|
||||||
|
CONF_RESOURCE: "https://api.example.com/data",
|
||||||
|
CONF_METHOD: "GET",
|
||||||
|
CONF_PARAMS: {
|
||||||
|
"filter": '{"type": "sensor", "id": 123}', # JSON string
|
||||||
|
"normal": "value",
|
||||||
|
},
|
||||||
|
"sensor": [
|
||||||
|
{
|
||||||
|
CONF_NAME: "Test Sensor",
|
||||||
|
CONF_VALUE_TEMPLATE: "{{ value_json.value }}",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
# Check the sensor was created
|
||||||
|
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
|
||||||
|
state = hass.states.get("sensor.test_sensor")
|
||||||
|
assert state is not None
|
||||||
|
assert state.state == "42"
|
||||||
|
|
||||||
|
# Verify the request was made with the JSON string intact
|
||||||
|
assert len(aioclient_mock.mock_calls) == 1
|
||||||
|
method, url, data, headers = aioclient_mock.mock_calls[0]
|
||||||
|
assert url.query["filter"] == '{"type": "sensor", "id": 123}'
|
||||||
|
assert url.query["normal"] == "value"
|
||||||
|
|
||||||
|
|
||||||
async def test_reload(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
|
async def test_reload(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
|
||||||
"""Verify we can reload reset sensors."""
|
"""Verify we can reload reset sensors."""
|
||||||
|
|
||||||
|
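# A minimal sketch of the decoding rule the three new encoding tests pin down:
# a charset in the response's Content-Type header wins, and the configured
# encoding is only a fallback. Illustrative only, not the rest integration's
# actual implementation; the function name is an assumption.
from email.message import Message


def decode_body(
    raw: bytes, content_type: str | None, configured_encoding: str = "utf-8"
) -> str:
    """Decode a response body, preferring the charset from Content-Type."""
    charset = None
    if content_type:
        header = Message()
        header["Content-Type"] = content_type
        charset = header.get_content_charset()  # None when no charset parameter
    return raw.decode(charset or configured_encoding)


# Mirrors the expectations asserted above:
assert decode_body("Björk".encode("iso-8859-1"), "text/plain; charset=iso-8859-1") == "Björk"
assert decode_body("Björk".encode("iso-8859-1"), "text/plain", "iso-8859-1") == "Björk"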
@ -214,25 +214,12 @@ class MockSoCo(MagicMock):
|
|||||||
surround_level = 3
|
surround_level = 3
|
||||||
music_surround_level = 4
|
music_surround_level = 4
|
||||||
soundbar_audio_input_format = "Dolby 5.1"
|
soundbar_audio_input_format = "Dolby 5.1"
|
||||||
factory: SoCoMockFactory | None = None
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def visible_zones(self):
|
def visible_zones(self):
|
||||||
"""Return visible zones and allow property to be overridden by device classes."""
|
"""Return visible zones and allow property to be overridden by device classes."""
|
||||||
return {self}
|
return {self}
|
||||||
|
|
||||||
@property
|
|
||||||
def all_zones(self) -> set[MockSoCo]:
|
|
||||||
"""Return a set of all mock zones, or just self if no factory or zones."""
|
|
||||||
if self.factory is not None:
|
|
||||||
if zones := self.factory.mock_all_zones:
|
|
||||||
return zones
|
|
||||||
return {self}
|
|
||||||
|
|
||||||
def set_factory(self, factory: SoCoMockFactory) -> None:
|
|
||||||
"""Set the factory for this mock."""
|
|
||||||
self.factory = factory
|
|
||||||
|
|
||||||
|
|
||||||
class SoCoMockFactory:
|
class SoCoMockFactory:
|
||||||
"""Factory for creating SoCo Mocks."""
|
"""Factory for creating SoCo Mocks."""
|
||||||
@ -257,19 +244,11 @@ class SoCoMockFactory:
|
|||||||
self.sonos_playlists = sonos_playlists
|
self.sonos_playlists = sonos_playlists
|
||||||
self.sonos_queue = sonos_queue
|
self.sonos_queue = sonos_queue
|
||||||
|
|
||||||
@property
|
|
||||||
def mock_all_zones(self) -> set[MockSoCo]:
|
|
||||||
"""Return a set of all mock zones."""
|
|
||||||
return {
|
|
||||||
mock for mock in self.mock_list.values() if mock.mock_include_in_all_zones
|
|
||||||
}
|
|
||||||
|
|
||||||
def cache_mock(
|
def cache_mock(
|
||||||
self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A"
|
self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A"
|
||||||
) -> MockSoCo:
|
) -> MockSoCo:
|
||||||
"""Put a user created mock into the cache."""
|
"""Put a user created mock into the cache."""
|
||||||
mock_soco.mock_add_spec(SoCo)
|
mock_soco.mock_add_spec(SoCo)
|
||||||
mock_soco.set_factory(self)
|
|
||||||
mock_soco.ip_address = ip_address
|
mock_soco.ip_address = ip_address
|
||||||
if ip_address != "192.168.42.2":
|
if ip_address != "192.168.42.2":
|
||||||
mock_soco.uid += f"_{ip_address}"
|
mock_soco.uid += f"_{ip_address}"
|
||||||
@ -281,11 +260,6 @@ class SoCoMockFactory:
|
|||||||
my_speaker_info = self.speaker_info.copy()
|
my_speaker_info = self.speaker_info.copy()
|
||||||
my_speaker_info["zone_name"] = name
|
my_speaker_info["zone_name"] = name
|
||||||
my_speaker_info["uid"] = mock_soco.uid
|
my_speaker_info["uid"] = mock_soco.uid
|
||||||
# Generate a different MAC for the non-default speakers.
|
|
||||||
# otherwise new devices will not be created.
|
|
||||||
if ip_address != "192.168.42.2":
|
|
||||||
last_octet = ip_address.split(".")[-1]
|
|
||||||
my_speaker_info["mac_address"] = f"00-00-00-00-00-{last_octet.zfill(2)}"
|
|
||||||
mock_soco.get_speaker_info = Mock(return_value=my_speaker_info)
|
mock_soco.get_speaker_info = Mock(return_value=my_speaker_info)
|
||||||
mock_soco.add_to_queue = Mock(return_value=10)
|
mock_soco.add_to_queue = Mock(return_value=10)
|
||||||
mock_soco.add_uri_to_queue = Mock(return_value=10)
|
mock_soco.add_uri_to_queue = Mock(return_value=10)
|
||||||
@ -304,7 +278,7 @@ class SoCoMockFactory:
|
|||||||
|
|
||||||
mock_soco.alarmClock = self.alarm_clock
|
mock_soco.alarmClock = self.alarm_clock
|
||||||
mock_soco.get_battery_info.return_value = self.battery_info
|
mock_soco.get_battery_info.return_value = self.battery_info
|
||||||
mock_soco.mock_include_in_all_zones = True
|
mock_soco.all_zones = {mock_soco}
|
||||||
mock_soco.group.coordinator = mock_soco
|
mock_soco.group.coordinator = mock_soco
|
||||||
mock_soco.household_id = "test_household_id"
|
mock_soco.household_id = "test_household_id"
|
||||||
self.mock_list[ip_address] = mock_soco
|
self.mock_list[ip_address] = mock_soco
|
||||||
|
@ -324,15 +324,10 @@ async def test_async_poll_manual_hosts_5(
|
|||||||
soco_1 = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room")
|
soco_1 = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room")
|
||||||
soco_1.renderingControl = Mock()
|
soco_1.renderingControl = Mock()
|
||||||
soco_1.renderingControl.GetVolume = Mock()
|
soco_1.renderingControl.GetVolume = Mock()
|
||||||
# Unavailable speakers should not be included in all zones
|
|
||||||
soco_1.mock_include_in_all_zones = False
|
|
||||||
|
|
||||||
speaker_1_activity = SpeakerActivity(hass, soco_1)
|
speaker_1_activity = SpeakerActivity(hass, soco_1)
|
||||||
soco_2 = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom")
|
soco_2 = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom")
|
||||||
soco_2.renderingControl = Mock()
|
soco_2.renderingControl = Mock()
|
||||||
soco_2.renderingControl.GetVolume = Mock()
|
soco_2.renderingControl.GetVolume = Mock()
|
||||||
soco_2.mock_include_in_all_zones = False
|
|
||||||
|
|
||||||
speaker_2_activity = SpeakerActivity(hass, soco_2)
|
speaker_2_activity = SpeakerActivity(hass, soco_2)
|
||||||
|
|
||||||
with caplog.at_level(logging.DEBUG):
|
with caplog.at_level(logging.DEBUG):
|
||||||
|
@ -26,10 +26,10 @@ from homeassistant.const import (
|
|||||||
STATE_ON,
|
STATE_ON,
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
from homeassistant.helpers import entity_registry as er
|
||||||
from homeassistant.util import dt as dt_util
|
from homeassistant.util import dt as dt_util
|
||||||
|
|
||||||
from .conftest import MockSoCo, SonosMockEvent, SonosMockService
|
from .conftest import MockSoCo, SonosMockEvent
|
||||||
|
|
||||||
from tests.common import async_fire_time_changed
|
from tests.common import async_fire_time_changed
|
||||||
|
|
||||||
@ -211,53 +211,3 @@ async def test_alarm_create_delete(
|
|||||||
|
|
||||||
assert "switch.sonos_alarm_14" in entity_registry.entities
|
assert "switch.sonos_alarm_14" in entity_registry.entities
|
||||||
assert "switch.sonos_alarm_15" not in entity_registry.entities
|
assert "switch.sonos_alarm_15" not in entity_registry.entities
|
||||||
|
|
||||||
|
|
||||||
async def test_alarm_change_device(
|
|
||||||
hass: HomeAssistant,
|
|
||||||
async_setup_sonos,
|
|
||||||
soco: MockSoCo,
|
|
||||||
alarm_clock: SonosMockService,
|
|
||||||
alarm_clock_extended: SonosMockService,
|
|
||||||
alarm_event: SonosMockEvent,
|
|
||||||
entity_registry: er.EntityRegistry,
|
|
||||||
device_registry: dr.DeviceRegistry,
|
|
||||||
sonos_setup_two_speakers: list[MockSoCo],
|
|
||||||
) -> None:
|
|
||||||
"""Test Sonos Alarm being moved to a different speaker.
|
|
||||||
|
|
||||||
This test simulates a scenario where an alarm is created on one speaker
|
|
||||||
and then moved to another speaker. It checks that the entity is correctly
|
|
||||||
created on the new speaker and removed from the old one.
|
|
||||||
"""
|
|
||||||
entity_id = "switch.sonos_alarm_14"
|
|
||||||
soco_lr = sonos_setup_two_speakers[0]
|
|
||||||
|
|
||||||
await async_setup_sonos()
|
|
||||||
|
|
||||||
# Initially, the alarm is created on the soco mock
|
|
||||||
assert entity_id in entity_registry.entities
|
|
||||||
entity = entity_registry.async_get(entity_id)
|
|
||||||
device = device_registry.async_get(entity.device_id)
|
|
||||||
assert device.name == soco.get_speaker_info()["zone_name"]
|
|
||||||
|
|
||||||
# Simulate the alarm being moved to the soco_lr speaker
|
|
||||||
alarm_update = copy(alarm_clock_extended.ListAlarms.return_value)
|
|
||||||
alarm_update["CurrentAlarmList"] = alarm_update["CurrentAlarmList"].replace(
|
|
||||||
"RINCON_test", f"{soco_lr.uid}"
|
|
||||||
)
|
|
||||||
alarm_clock.ListAlarms.return_value = alarm_update
|
|
||||||
|
|
||||||
# Update the alarm_list_version so it gets processed.
|
|
||||||
alarm_event.variables["alarm_list_version"] = f"{soco_lr.uid}:1000"
|
|
||||||
alarm_update["CurrentAlarmListVersion"] = alarm_event.increment_variable(
|
|
||||||
"alarm_list_version"
|
|
||||||
)
|
|
||||||
|
|
||||||
alarm_clock.subscribe.return_value.callback(event=alarm_event)
|
|
||||||
await hass.async_block_till_done(wait_background_tasks=True)
|
|
||||||
|
|
||||||
assert entity_id in entity_registry.entities
|
|
||||||
alarm_14 = entity_registry.async_get(entity_id)
|
|
||||||
device = device_registry.async_get(alarm_14.device_id)
|
|
||||||
assert device.name == soco_lr.get_speaker_info()["zone_name"]
|
|
||||||
|
@ -562,16 +562,10 @@ async def test_vacuum_log_deprecated_battery_properties_using_attr(
|
|||||||
# Test we only log once
|
# Test we only log once
|
||||||
assert (
|
assert (
|
||||||
"Detected that custom integration 'test' is setting the battery_level which has been deprecated."
|
"Detected that custom integration 'test' is setting the battery_level which has been deprecated."
|
||||||
" Integration test should implement a sensor instead with a correct device class and link it to"
|
|
||||||
" the same device. This will stop working in Home Assistant 2026.8,"
|
|
||||||
" please report it to the author of the 'test' custom integration"
|
|
||||||
not in caplog.text
|
not in caplog.text
|
||||||
)
|
)
|
||||||
assert (
|
assert (
|
||||||
"Detected that custom integration 'test' is setting the battery_icon which has been deprecated."
|
"Detected that custom integration 'test' is setting the battery_icon which has been deprecated."
|
||||||
" Integration test should implement a sensor instead with a correct device class and link it to"
|
|
||||||
" the same device. This will stop working in Home Assistant 2026.8,"
|
|
||||||
" please report it to the author of the 'test' custom integration"
|
|
||||||
not in caplog.text
|
not in caplog.text
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -613,3 +607,34 @@ async def test_vacuum_log_deprecated_battery_supported_feature(
|
|||||||
", please report it to the author of the 'test' custom integration"
|
", please report it to the author of the 'test' custom integration"
|
||||||
in caplog.text
|
in caplog.text
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_vacuum_not_log_deprecated_battery_properties_during_init(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
"""Test not logging deprecation until after added to hass."""
|
||||||
|
|
||||||
|
class MockLegacyVacuum(MockVacuum):
|
||||||
|
"""Mocked vacuum entity."""
|
||||||
|
|
||||||
|
def __init__(self, **kwargs: Any) -> None:
|
||||||
|
"""Initialize a mock vacuum entity."""
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
self._attr_battery_level = 50
|
||||||
|
|
||||||
|
@property
|
||||||
|
def activity(self) -> str:
|
||||||
|
"""Return the state of the entity."""
|
||||||
|
return VacuumActivity.CLEANING
|
||||||
|
|
||||||
|
entity = MockLegacyVacuum(
|
||||||
|
name="Testing",
|
||||||
|
entity_id="vacuum.test",
|
||||||
|
)
|
||||||
|
assert entity.battery_level == 50
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"Detected that custom integration 'test' is setting the battery_level which has been deprecated."
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
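# A minimal sketch of the behaviour the new vacuum test asserts: reading a
# deprecated battery property before the entity has been added to hass must
# not log the deprecation. Gating the warning on the hass reference is one way
# to get that; the names below are assumptions, not Home Assistant's actual code.
import logging

_LOGGER = logging.getLogger(__name__)


class DeprecatedBatteryMixin:
    """Illustrative mixin deferring the deprecation warning until added to hass."""

    hass: object | None = None  # set once the entity is added to hass
    _attr_battery_level: int | None = None
    _battery_warned = False  # warn at most once per entity

    @property
    def battery_level(self) -> int | None:
        """Return the battery level, warning only after being added to hass."""
        if self.hass is not None and not self._battery_warned:
            _LOGGER.warning(
                "battery_level is deprecated; implement a battery sensor instead"
            )
            self._battery_warned = True
        return self._attr_battery_level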
@ -7,165 +7,22 @@ import pytest
|
|||||||
import requests
|
import requests
|
||||||
|
|
||||||
from homeassistant.components.wallbox.const import (
|
from homeassistant.components.wallbox.const import (
|
||||||
CHARGER_ADDED_ENERGY_KEY,
|
|
||||||
CHARGER_ADDED_RANGE_KEY,
|
|
||||||
CHARGER_CHARGING_POWER_KEY,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY,
|
|
||||||
CHARGER_CURRENCY_KEY,
|
|
||||||
CHARGER_CURRENT_VERSION_KEY,
|
|
||||||
CHARGER_DATA_KEY,
|
|
||||||
CHARGER_DATA_POST_L1_KEY,
|
CHARGER_DATA_POST_L1_KEY,
|
||||||
CHARGER_DATA_POST_L2_KEY,
|
CHARGER_DATA_POST_L2_KEY,
|
||||||
CHARGER_ECO_SMART_KEY,
|
|
||||||
CHARGER_ECO_SMART_MODE_KEY,
|
|
||||||
CHARGER_ECO_SMART_STATUS_KEY,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY,
|
CHARGER_ENERGY_PRICE_KEY,
|
||||||
CHARGER_FEATURES_KEY,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY,
|
CHARGER_LOCKED_UNLOCKED_KEY,
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY,
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY,
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_POST_KEY,
|
CHARGER_MAX_CHARGING_CURRENT_POST_KEY,
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY,
|
CHARGER_MAX_ICP_CURRENT_KEY,
|
||||||
CHARGER_NAME_KEY,
|
|
||||||
CHARGER_PART_NUMBER_KEY,
|
|
||||||
CHARGER_PLAN_KEY,
|
|
||||||
CHARGER_POWER_BOOST_KEY,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY,
|
|
||||||
CHARGER_SOFTWARE_KEY,
|
|
||||||
CHARGER_STATUS_ID_KEY,
|
|
||||||
CONF_STATION,
|
CONF_STATION,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
)
|
)
|
||||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
|
||||||
from .const import ERROR, REFRESH_TOKEN_TTL, STATUS, TTL, USER_ID
|
from .const import WALLBOX_AUTHORISATION_RESPONSE, WALLBOX_STATUS_RESPONSE
|
||||||
|
|
||||||
from tests.common import MockConfigEntry
|
from tests.common import MockConfigEntry
|
||||||
|
|
||||||
test_response = {
|
|
||||||
CHARGER_CHARGING_POWER_KEY: 0,
|
|
||||||
CHARGER_STATUS_ID_KEY: 193,
|
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY: 0,
|
|
||||||
CHARGER_ADDED_RANGE_KEY: 150,
|
|
||||||
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
|
||||||
CHARGER_NAME_KEY: "WallboxName",
|
|
||||||
CHARGER_DATA_KEY: {
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
|
||||||
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
|
||||||
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
|
||||||
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
|
||||||
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
|
||||||
CHARGER_ECO_SMART_KEY: {
|
|
||||||
CHARGER_ECO_SMART_STATUS_KEY: False,
|
|
||||||
CHARGER_ECO_SMART_MODE_KEY: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
test_response_bidir = {
|
|
||||||
CHARGER_CHARGING_POWER_KEY: 0,
|
|
||||||
CHARGER_STATUS_ID_KEY: 193,
|
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY: 0,
|
|
||||||
CHARGER_ADDED_RANGE_KEY: 150,
|
|
||||||
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
|
||||||
CHARGER_NAME_KEY: "WallboxName",
|
|
||||||
CHARGER_DATA_KEY: {
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
|
||||||
CHARGER_PART_NUMBER_KEY: "QSP1-0-2-4-9-002-E",
|
|
||||||
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
|
||||||
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
|
||||||
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
|
||||||
CHARGER_ECO_SMART_KEY: {
|
|
||||||
CHARGER_ECO_SMART_STATUS_KEY: False,
|
|
||||||
CHARGER_ECO_SMART_MODE_KEY: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
test_response_eco_mode = {
|
|
||||||
CHARGER_CHARGING_POWER_KEY: 0,
|
|
||||||
CHARGER_STATUS_ID_KEY: 193,
|
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY: 0,
|
|
||||||
CHARGER_ADDED_RANGE_KEY: 150,
|
|
||||||
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
|
||||||
CHARGER_NAME_KEY: "WallboxName",
|
|
||||||
CHARGER_DATA_KEY: {
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
|
||||||
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
|
||||||
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
|
||||||
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
|
||||||
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
|
||||||
CHARGER_ECO_SMART_KEY: {
|
|
||||||
CHARGER_ECO_SMART_STATUS_KEY: True,
|
|
||||||
CHARGER_ECO_SMART_MODE_KEY: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
test_response_full_solar = {
|
|
||||||
CHARGER_CHARGING_POWER_KEY: 0,
|
|
||||||
CHARGER_STATUS_ID_KEY: 193,
|
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY: 0,
|
|
||||||
CHARGER_ADDED_RANGE_KEY: 150,
|
|
||||||
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
|
||||||
CHARGER_NAME_KEY: "WallboxName",
|
|
||||||
CHARGER_DATA_KEY: {
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
|
||||||
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
|
||||||
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
|
||||||
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
|
||||||
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
|
||||||
CHARGER_ECO_SMART_KEY: {
|
|
||||||
CHARGER_ECO_SMART_STATUS_KEY: True,
|
|
||||||
CHARGER_ECO_SMART_MODE_KEY: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
test_response_no_power_boost = {
|
|
||||||
CHARGER_CHARGING_POWER_KEY: 0,
|
|
||||||
CHARGER_STATUS_ID_KEY: 193,
|
|
||||||
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
|
||||||
CHARGER_CHARGING_SPEED_KEY: 0,
|
|
||||||
CHARGER_ADDED_RANGE_KEY: 150,
|
|
||||||
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
|
||||||
CHARGER_NAME_KEY: "WallboxName",
|
|
||||||
CHARGER_DATA_KEY: {
|
|
||||||
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
|
||||||
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
|
||||||
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
|
||||||
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
|
||||||
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
|
||||||
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
|
||||||
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
|
||||||
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
|
||||||
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: []},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
http_403_error = requests.exceptions.HTTPError()
|
http_403_error = requests.exceptions.HTTPError()
|
||||||
http_403_error.response = requests.Response()
|
http_403_error.response = requests.Response()
|
||||||
http_403_error.response.status_code = HTTPStatus.FORBIDDEN
|
http_403_error.response.status_code = HTTPStatus.FORBIDDEN
|
||||||
@ -176,45 +33,6 @@ http_429_error = requests.exceptions.HTTPError()
|
|||||||
http_429_error.response = requests.Response()
|
http_429_error.response = requests.Response()
|
||||||
http_429_error.response.status_code = HTTPStatus.TOO_MANY_REQUESTS
|
http_429_error.response.status_code = HTTPStatus.TOO_MANY_REQUESTS
|
||||||
|
|
||||||
authorisation_response = {
|
|
||||||
"data": {
|
|
||||||
"attributes": {
|
|
||||||
"token": "fakekeyhere",
|
|
||||||
"refresh_token": "refresh_fakekeyhere",
|
|
||||||
USER_ID: 12345,
|
|
||||||
TTL: 145656758,
|
|
||||||
REFRESH_TOKEN_TTL: 145756758,
|
|
||||||
ERROR: "false",
|
|
||||||
STATUS: 200,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
authorisation_response_unauthorised = {
|
|
||||||
"data": {
|
|
||||||
"attributes": {
|
|
||||||
"token": "fakekeyhere",
|
|
||||||
"refresh_token": "refresh_fakekeyhere",
|
|
||||||
USER_ID: 12345,
|
|
||||||
TTL: 145656758,
|
|
||||||
REFRESH_TOKEN_TTL: 145756758,
|
|
||||||
ERROR: "false",
|
|
||||||
STATUS: 404,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
invalid_reauth_response = {
|
|
||||||
"jwt": "fakekeyhere",
|
|
||||||
"refresh_token": "refresh_fakekeyhere",
|
|
||||||
"user_id": 12345,
|
|
||||||
"ttl": 145656758,
|
|
||||||
"refresh_token_ttl": 145756758,
|
|
||||||
"error": False,
|
|
||||||
"status": 200,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def entry(hass: HomeAssistant) -> MockConfigEntry:
|
def entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||||
@ -237,7 +55,7 @@ def mock_wallbox():
|
|||||||
"""Patch Wallbox class for tests."""
|
"""Patch Wallbox class for tests."""
|
||||||
with patch("homeassistant.components.wallbox.Wallbox") as mock:
|
with patch("homeassistant.components.wallbox.Wallbox") as mock:
|
||||||
wallbox = MagicMock()
|
wallbox = MagicMock()
|
||||||
wallbox.authenticate = Mock(return_value=authorisation_response)
|
wallbox.authenticate = Mock(return_value=WALLBOX_AUTHORISATION_RESPONSE)
|
||||||
wallbox.lockCharger = Mock(
|
wallbox.lockCharger = Mock(
|
||||||
return_value={
|
return_value={
|
||||||
CHARGER_DATA_POST_L1_KEY: {
|
CHARGER_DATA_POST_L1_KEY: {
|
||||||
@ -263,7 +81,7 @@ def mock_wallbox():
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
wallbox.setIcpMaxCurrent = Mock(return_value={CHARGER_MAX_ICP_CURRENT_KEY: 25})
|
wallbox.setIcpMaxCurrent = Mock(return_value={CHARGER_MAX_ICP_CURRENT_KEY: 25})
|
||||||
wallbox.getChargerStatus = Mock(return_value=test_response)
|
wallbox.getChargerStatus = Mock(return_value=WALLBOX_STATUS_RESPONSE)
|
||||||
mock.return_value = wallbox
|
mock.return_value = wallbox
|
||||||
yield wallbox
|
yield wallbox
|
||||||
|
|
||||||
|
@ -1,5 +1,31 @@
|
|||||||
"""Provides constants for Wallbox component tests."""
|
"""Provides constants for Wallbox component tests."""
|
||||||
|
|
||||||
|
from homeassistant.components.wallbox.const import (
|
||||||
|
CHARGER_ADDED_ENERGY_KEY,
|
||||||
|
CHARGER_ADDED_RANGE_KEY,
|
||||||
|
CHARGER_CHARGING_POWER_KEY,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY,
|
||||||
|
CHARGER_CURRENCY_KEY,
|
||||||
|
CHARGER_CURRENT_VERSION_KEY,
|
||||||
|
CHARGER_DATA_KEY,
|
||||||
|
CHARGER_ECO_SMART_KEY,
|
||||||
|
CHARGER_ECO_SMART_MODE_KEY,
|
||||||
|
CHARGER_ECO_SMART_STATUS_KEY,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY,
|
||||||
|
CHARGER_FEATURES_KEY,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY,
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY,
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY,
|
||||||
|
CHARGER_NAME_KEY,
|
||||||
|
CHARGER_PART_NUMBER_KEY,
|
||||||
|
CHARGER_PLAN_KEY,
|
||||||
|
CHARGER_POWER_BOOST_KEY,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY,
|
||||||
|
CHARGER_SOFTWARE_KEY,
|
||||||
|
CHARGER_STATUS_ID_KEY,
|
||||||
|
)
|
||||||
|
|
||||||
JWT = "jwt"
|
JWT = "jwt"
|
||||||
USER_ID = "user_id"
|
USER_ID = "user_id"
|
||||||
TTL = "ttl"
|
TTL = "ttl"
|
||||||
@ -7,6 +33,169 @@ REFRESH_TOKEN_TTL = "refresh_token_ttl"
|
|||||||
ERROR = "error"
|
ERROR = "error"
|
||||||
STATUS = "status"
|
STATUS = "status"
|
||||||
|
|
||||||
|
WALLBOX_STATUS_RESPONSE = {
|
||||||
|
CHARGER_CHARGING_POWER_KEY: 0,
|
||||||
|
CHARGER_STATUS_ID_KEY: 193,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY: 0,
|
||||||
|
CHARGER_ADDED_RANGE_KEY: 150,
|
||||||
|
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
||||||
|
CHARGER_NAME_KEY: "WallboxName",
|
||||||
|
CHARGER_DATA_KEY: {
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
||||||
|
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
||||||
|
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
||||||
|
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
||||||
|
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
||||||
|
CHARGER_ECO_SMART_KEY: {
|
||||||
|
CHARGER_ECO_SMART_STATUS_KEY: False,
|
||||||
|
CHARGER_ECO_SMART_MODE_KEY: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
WALLBOX_STATUS_RESPONSE_BIDIR = {
|
||||||
|
CHARGER_CHARGING_POWER_KEY: 0,
|
||||||
|
CHARGER_STATUS_ID_KEY: 193,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY: 0,
|
||||||
|
CHARGER_ADDED_RANGE_KEY: 150,
|
||||||
|
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
||||||
|
CHARGER_NAME_KEY: "WallboxName",
|
||||||
|
CHARGER_DATA_KEY: {
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
||||||
|
CHARGER_PART_NUMBER_KEY: "QSP1-0-2-4-9-002-E",
|
||||||
|
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
||||||
|
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
||||||
|
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
||||||
|
CHARGER_ECO_SMART_KEY: {
|
||||||
|
CHARGER_ECO_SMART_STATUS_KEY: False,
|
||||||
|
CHARGER_ECO_SMART_MODE_KEY: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
WALLBOX_STATUS_RESPONSE_ECO_MODE = {
|
||||||
|
CHARGER_CHARGING_POWER_KEY: 0,
|
||||||
|
CHARGER_STATUS_ID_KEY: 193,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY: 0,
|
||||||
|
CHARGER_ADDED_RANGE_KEY: 150,
|
||||||
|
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
||||||
|
CHARGER_NAME_KEY: "WallboxName",
|
||||||
|
CHARGER_DATA_KEY: {
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
||||||
|
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
||||||
|
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
||||||
|
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
||||||
|
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
||||||
|
CHARGER_ECO_SMART_KEY: {
|
||||||
|
CHARGER_ECO_SMART_STATUS_KEY: True,
|
||||||
|
CHARGER_ECO_SMART_MODE_KEY: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
WALLBOX_STATUS_RESPONSE_FULL_SOLAR = {
|
||||||
|
CHARGER_CHARGING_POWER_KEY: 0,
|
||||||
|
CHARGER_STATUS_ID_KEY: 193,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY: 0,
|
||||||
|
CHARGER_ADDED_RANGE_KEY: 150,
|
||||||
|
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
||||||
|
CHARGER_NAME_KEY: "WallboxName",
|
||||||
|
CHARGER_DATA_KEY: {
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
||||||
|
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
||||||
|
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
||||||
|
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
||||||
|
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]},
|
||||||
|
CHARGER_ECO_SMART_KEY: {
|
||||||
|
CHARGER_ECO_SMART_STATUS_KEY: True,
|
||||||
|
CHARGER_ECO_SMART_MODE_KEY: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
WALLBOX_STATUS_RESPONSE_NO_POWER_BOOST = {
|
||||||
|
CHARGER_CHARGING_POWER_KEY: 0,
|
||||||
|
CHARGER_STATUS_ID_KEY: 193,
|
||||||
|
CHARGER_MAX_AVAILABLE_POWER_KEY: 25.0,
|
||||||
|
CHARGER_CHARGING_SPEED_KEY: 0,
|
||||||
|
CHARGER_ADDED_RANGE_KEY: 150,
|
||||||
|
CHARGER_ADDED_ENERGY_KEY: 44.697,
|
||||||
|
CHARGER_NAME_KEY: "WallboxName",
|
||||||
|
CHARGER_DATA_KEY: {
|
||||||
|
CHARGER_MAX_CHARGING_CURRENT_KEY: 24,
|
||||||
|
CHARGER_ENERGY_PRICE_KEY: 0.4,
|
||||||
|
CHARGER_LOCKED_UNLOCKED_KEY: False,
|
||||||
|
CHARGER_SERIAL_NUMBER_KEY: "20000",
|
||||||
|
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E",
|
||||||
|
CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"},
|
||||||
|
CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"},
|
||||||
|
CHARGER_MAX_ICP_CURRENT_KEY: 20,
|
||||||
|
CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: []},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
WALLBOX_AUTHORISATION_RESPONSE = {
|
||||||
|
"data": {
|
||||||
|
"attributes": {
|
||||||
|
"token": "fakekeyhere",
|
||||||
|
"refresh_token": "refresh_fakekeyhere",
|
||||||
|
USER_ID: 12345,
|
||||||
|
TTL: 145656758,
|
||||||
|
REFRESH_TOKEN_TTL: 145756758,
|
||||||
|
ERROR: "false",
|
||||||
|
STATUS: 200,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
WALLBOX_AUTHORISATION_RESPONSE_UNAUTHORISED = {
|
||||||
|
"data": {
|
||||||
|
"attributes": {
|
||||||
|
"token": "fakekeyhere",
|
||||||
|
"refresh_token": "refresh_fakekeyhere",
|
||||||
|
USER_ID: 12345,
|
||||||
|
TTL: 145656758,
|
||||||
|
REFRESH_TOKEN_TTL: 145756758,
|
||||||
|
ERROR: "false",
|
||||||
|
STATUS: 404,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
WALLBOX_INVALID_REAUTH_RESPONSE = {
|
||||||
|
"jwt": "fakekeyhere",
|
||||||
|
"refresh_token": "refresh_fakekeyhere",
|
||||||
|
"user_id": 12345,
|
||||||
|
"ttl": 145656758,
|
||||||
|
"refresh_token_ttl": 145756758,
|
||||||
|
"error": False,
|
||||||
|
"status": 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
MOCK_NUMBER_ENTITY_ID = "number.wallbox_wallboxname_maximum_charging_current"
|
MOCK_NUMBER_ENTITY_ID = "number.wallbox_wallboxname_maximum_charging_current"
|
||||||
MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID = "number.wallbox_wallboxname_energy_price"
|
MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID = "number.wallbox_wallboxname_energy_price"
|
||||||
MOCK_NUMBER_ENTITY_ICP_CURRENT_ID = "number.wallbox_wallboxname_maximum_icp_current"
|
MOCK_NUMBER_ENTITY_ICP_CURRENT_ID = "number.wallbox_wallboxname_maximum_icp_current"
|
||||||
|
@ -3,7 +3,6 @@
|
|||||||
from unittest.mock import Mock, patch
|
from unittest.mock import Mock, patch
|
||||||
|
|
||||||
from homeassistant import config_entries
|
from homeassistant import config_entries
|
||||||
from homeassistant.components.wallbox import config_flow
|
|
||||||
from homeassistant.components.wallbox.const import (
|
from homeassistant.components.wallbox.const import (
|
||||||
CHARGER_ADDED_ENERGY_KEY,
|
CHARGER_ADDED_ENERGY_KEY,
|
||||||
CHARGER_ADDED_RANGE_KEY,
|
CHARGER_ADDED_RANGE_KEY,
|
||||||
@ -18,12 +17,10 @@ from homeassistant.config_entries import ConfigEntryState
|
|||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.data_entry_flow import FlowResultType
|
from homeassistant.data_entry_flow import FlowResultType
|
||||||
|
|
||||||
from .conftest import (
|
from .conftest import http_403_error, http_404_error, setup_integration
|
||||||
authorisation_response,
|
from .const import (
|
||||||
authorisation_response_unauthorised,
|
WALLBOX_AUTHORISATION_RESPONSE,
|
||||||
http_403_error,
|
WALLBOX_AUTHORISATION_RESPONSE_UNAUTHORISED,
|
||||||
http_404_error,
|
|
||||||
setup_integration,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from tests.common import MockConfigEntry
|
from tests.common import MockConfigEntry
|
||||||
@ -40,10 +37,9 @@ test_response = {
|
|||||||
|
|
||||||
async def test_show_set_form(hass: HomeAssistant, mock_wallbox) -> None:
|
async def test_show_set_form(hass: HomeAssistant, mock_wallbox) -> None:
|
||||||
"""Test that the setup form is served."""
|
"""Test that the setup form is served."""
|
||||||
flow = config_flow.WallboxConfigFlow()
|
result = await hass.config_entries.flow.async_init(
|
||||||
flow.hass = hass
|
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||||
result = await flow.async_step_user(user_input=None)
|
)
|
||||||
|
|
||||||
assert result["type"] is FlowResultType.FORM
|
assert result["type"] is FlowResultType.FORM
|
||||||
assert result["step_id"] == "user"
|
assert result["step_id"] == "user"
|
||||||
|
|
||||||
@ -112,7 +108,7 @@ async def test_form_validate_input(hass: HomeAssistant) -> None:
|
|||||||
with (
|
with (
|
||||||
patch(
|
patch(
|
||||||
"homeassistant.components.wallbox.Wallbox.authenticate",
|
"homeassistant.components.wallbox.Wallbox.authenticate",
|
||||||
return_value=authorisation_response,
|
return_value=WALLBOX_AUTHORISATION_RESPONSE,
|
||||||
),
|
),
|
||||||
patch(
|
patch(
|
||||||
"homeassistant.components.wallbox.Wallbox.pauseChargingSession",
|
"homeassistant.components.wallbox.Wallbox.pauseChargingSession",
|
||||||
@ -143,7 +139,7 @@ async def test_form_reauth(
|
|||||||
patch.object(
|
patch.object(
|
||||||
mock_wallbox,
|
mock_wallbox,
|
||||||
"authenticate",
|
"authenticate",
|
||||||
return_value=authorisation_response_unauthorised,
|
return_value=WALLBOX_AUTHORISATION_RESPONSE_UNAUTHORISED,
|
||||||
),
|
),
|
||||||
patch.object(mock_wallbox, "getChargerStatus", return_value=test_response),
|
patch.object(mock_wallbox, "getChargerStatus", return_value=test_response),
|
||||||
):
|
):
|
||||||
@ -176,7 +172,7 @@ async def test_form_reauth_invalid(
|
|||||||
patch.object(
|
patch.object(
|
||||||
mock_wallbox,
|
mock_wallbox,
|
||||||
"authenticate",
|
"authenticate",
|
||||||
return_value=authorisation_response_unauthorised,
|
return_value=WALLBOX_AUTHORISATION_RESPONSE_UNAUTHORISED,
|
||||||
),
|
),
|
||||||
patch.object(mock_wallbox, "getChargerStatus", return_value=test_response),
|
patch.object(mock_wallbox, "getChargerStatus", return_value=test_response),
|
||||||
):
|
):
|
||||||
|
@ -1,19 +1,23 @@
|
|||||||
"""Test Wallbox Init Component."""
|
"""Test Wallbox Init Component."""
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from homeassistant.components.wallbox.const import DOMAIN
|
import pytest
|
||||||
from homeassistant.config_entries import ConfigEntryState
|
|
||||||
from homeassistant.core import HomeAssistant
|
|
||||||
|
|
||||||
from .conftest import (
|
from homeassistant.components.input_number import ATTR_VALUE, SERVICE_SET_VALUE
|
||||||
http_403_error,
|
from homeassistant.config_entries import ConfigEntryState
|
||||||
http_429_error,
|
from homeassistant.const import ATTR_ENTITY_ID
|
||||||
setup_integration,
|
from homeassistant.core import HomeAssistant
|
||||||
test_response_no_power_boost,
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
|
|
||||||
|
from .conftest import http_403_error, http_429_error, setup_integration
|
||||||
|
from .const import (
|
||||||
|
MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID,
|
||||||
|
WALLBOX_STATUS_RESPONSE_NO_POWER_BOOST,
|
||||||
)
|
)
|
||||||
|
|
||||||
-from tests.common import MockConfigEntry
+from tests.common import MockConfigEntry, async_fire_time_changed


async def test_wallbox_setup_unload_entry(
@@ -40,24 +44,25 @@ async def test_wallbox_unload_entry_connection_error(
    assert entry.state is ConfigEntryState.NOT_LOADED


-async def test_wallbox_refresh_failed_connection_error_auth(
+async def test_wallbox_refresh_failed_connection_error_too_many_requests(
    hass: HomeAssistant, entry: MockConfigEntry, mock_wallbox
) -> None:
    """Test Wallbox setup with connection error."""

-    await setup_integration(hass, entry)
-    assert entry.state is ConfigEntryState.LOADED
+    with patch.object(mock_wallbox, "getChargerStatus", side_effect=http_429_error):
+        await setup_integration(hass, entry)
+        assert entry.state is ConfigEntryState.SETUP_RETRY

-    with patch.object(mock_wallbox, "authenticate", side_effect=http_429_error):
-        wallbox = hass.data[DOMAIN][entry.entry_id]
-        await wallbox.async_refresh()
+        await hass.async_block_till_done()

    assert await hass.config_entries.async_unload(entry.entry_id)
    assert entry.state is ConfigEntryState.NOT_LOADED


-async def test_wallbox_refresh_failed_invalid_auth(
-    hass: HomeAssistant, entry: MockConfigEntry, mock_wallbox
+async def test_wallbox_refresh_failed_error_auth(
+    hass: HomeAssistant,
+    entry: MockConfigEntry,
+    mock_wallbox,
) -> None:
    """Test Wallbox setup with authentication error."""

@@ -66,11 +71,31 @@ async def test_wallbox_refresh_failed_invalid_auth(

    with (
        patch.object(mock_wallbox, "authenticate", side_effect=http_403_error),
-        patch.object(mock_wallbox, "pauseChargingSession", side_effect=http_403_error),
+        pytest.raises(HomeAssistantError),
    ):
-        wallbox = hass.data[DOMAIN][entry.entry_id]
+        await hass.services.async_call(
+            "number",
+            SERVICE_SET_VALUE,
+            {
+                ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID,
+                ATTR_VALUE: 1.1,
+            },
+            blocking=True,
+        )

-        await wallbox.async_refresh()
+    with (
+        patch.object(mock_wallbox, "authenticate", side_effect=http_429_error),
+        pytest.raises(HomeAssistantError),
+    ):
+        await hass.services.async_call(
+            "number",
+            SERVICE_SET_VALUE,
+            {
+                ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID,
+                ATTR_VALUE: 1.1,
+            },
+            blocking=True,
+        )

    assert await hass.config_entries.async_unload(entry.entry_id)
    assert entry.state is ConfigEntryState.NOT_LOADED

@@ -81,13 +106,10 @@ async def test_wallbox_refresh_failed_http_error(
) -> None:
    """Test Wallbox setup with authentication error."""

-    await setup_integration(hass, entry)
-    assert entry.state is ConfigEntryState.LOADED

    with patch.object(mock_wallbox, "getChargerStatus", side_effect=http_403_error):
-        wallbox = hass.data[DOMAIN][entry.entry_id]
-        await wallbox.async_refresh()
+        await setup_integration(hass, entry)
+        assert entry.state is ConfigEntryState.SETUP_RETRY
+        await hass.async_block_till_done()

    assert await hass.config_entries.async_unload(entry.entry_id)
    assert entry.state is ConfigEntryState.NOT_LOADED

@@ -102,9 +124,8 @@ async def test_wallbox_refresh_failed_too_many_requests(
    assert entry.state is ConfigEntryState.LOADED

    with patch.object(mock_wallbox, "getChargerStatus", side_effect=http_429_error):
-        wallbox = hass.data[DOMAIN][entry.entry_id]
-        await wallbox.async_refresh()
+        async_fire_time_changed(hass, datetime.now() + timedelta(seconds=120), True)
+        await hass.async_block_till_done()

    assert await hass.config_entries.async_unload(entry.entry_id)
    assert entry.state is ConfigEntryState.NOT_LOADED

@@ -119,9 +140,8 @@ async def test_wallbox_refresh_failed_connection_error(
    assert entry.state is ConfigEntryState.LOADED

    with patch.object(mock_wallbox, "pauseChargingSession", side_effect=http_403_error):
-        wallbox = hass.data[DOMAIN][entry.entry_id]
-        await wallbox.async_refresh()
+        async_fire_time_changed(hass, datetime.now() + timedelta(seconds=120), True)
+        await hass.async_block_till_done()

    assert await hass.config_entries.async_unload(entry.entry_id)
    assert entry.state is ConfigEntryState.NOT_LOADED

@@ -132,7 +152,9 @@ async def test_wallbox_setup_load_entry_no_eco_mode(
) -> None:
    """Test Wallbox Unload."""
    with patch.object(
-        mock_wallbox, "getChargerStatus", return_value=test_response_no_power_boost
+        mock_wallbox,
+        "getChargerStatus",
+        return_value=WALLBOX_STATUS_RESPONSE_NO_POWER_BOOST,
    ):
        await setup_integration(hass, entry)
        assert entry.state is ConfigEntryState.LOADED
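The hunks above stop reaching into hass.data for the coordinator and calling await wallbox.async_refresh(); instead they either let setup itself fail (SETUP_RETRY) or advance time so the DataUpdateCoordinator runs its own scheduled poll. A minimal sketch of that time-jump pattern, assuming the same mock_wallbox and http_429_error fixtures these tests use (the helper name _trigger_failed_poll is illustrative, not part of the diff):

from datetime import datetime, timedelta
from unittest.mock import patch

from tests.common import async_fire_time_changed


async def _trigger_failed_poll(hass, mock_wallbox, http_429_error) -> None:
    """Advance time past the coordinator's update interval while the API errors."""
    with patch.object(mock_wallbox, "getChargerStatus", side_effect=http_429_error):
        # The third argument (fire_all=True) also fires time listeners whose
        # scheduled time has not been reached yet, so the poll runs immediately.
        async_fire_time_changed(hass, datetime.now() + timedelta(seconds=120), True)
        await hass.async_block_till_done()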
@@ -11,17 +11,12 @@ from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

-from .conftest import (
-    http_403_error,
-    http_404_error,
-    http_429_error,
-    setup_integration,
-    test_response_bidir,
-)
+from .conftest import http_403_error, http_404_error, http_429_error, setup_integration
from .const import (
    MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID,
    MOCK_NUMBER_ENTITY_ICP_CURRENT_ID,
    MOCK_NUMBER_ENTITY_ID,
+    WALLBOX_STATUS_RESPONSE_BIDIR,
)

from tests.common import MockConfigEntry

@@ -53,7 +48,7 @@ async def test_wallbox_number_power_class_bidir(
) -> None:
    """Test wallbox sensor class."""
    with patch.object(
-        mock_wallbox, "getChargerStatus", return_value=test_response_bidir
+        mock_wallbox, "getChargerStatus", return_value=WALLBOX_STATUS_RESPONSE_BIDIR
    ):
        await setup_integration(hass, entry)

@@ -13,23 +13,21 @@ from homeassistant.components.wallbox.const import EcoSmartMode
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, HomeAssistantError

-from .conftest import (
-    http_404_error,
-    http_429_error,
-    setup_integration,
-    test_response,
-    test_response_eco_mode,
-    test_response_full_solar,
-    test_response_no_power_boost,
+from .conftest import http_404_error, http_429_error, setup_integration
+from .const import (
+    MOCK_SELECT_ENTITY_ID,
+    WALLBOX_STATUS_RESPONSE,
+    WALLBOX_STATUS_RESPONSE_ECO_MODE,
+    WALLBOX_STATUS_RESPONSE_FULL_SOLAR,
+    WALLBOX_STATUS_RESPONSE_NO_POWER_BOOST,
)
-from .const import MOCK_SELECT_ENTITY_ID

from tests.common import MockConfigEntry

TEST_OPTIONS = [
-    (EcoSmartMode.OFF, test_response),
-    (EcoSmartMode.ECO_MODE, test_response_eco_mode),
-    (EcoSmartMode.FULL_SOLAR, test_response_full_solar),
+    (EcoSmartMode.OFF, WALLBOX_STATUS_RESPONSE),
+    (EcoSmartMode.ECO_MODE, WALLBOX_STATUS_RESPONSE_ECO_MODE),
+    (EcoSmartMode.FULL_SOLAR, WALLBOX_STATUS_RESPONSE_FULL_SOLAR),
]


@@ -61,7 +59,9 @@ async def test_wallbox_select_no_power_boost_class(
    """Test wallbox select class."""

    with patch.object(
-        mock_wallbox, "getChargerStatus", return_value=test_response_no_power_boost
+        mock_wallbox,
+        "getChargerStatus",
+        return_value=WALLBOX_STATUS_RESPONSE_NO_POWER_BOOST,
    ):
        await setup_integration(hass, entry)

@@ -4,7 +4,12 @@ from aiowebostv import WebOsTvPairError
import pytest

from homeassistant import config_entries
-from homeassistant.components.webostv.const import CONF_SOURCES, DOMAIN, LIVE_TV_APP_ID
+from homeassistant.components.webostv.const import (
+    CONF_SOURCES,
+    DEFAULT_NAME,
+    DOMAIN,
+    LIVE_TV_APP_ID,
+)
from homeassistant.config_entries import SOURCE_SSDP
from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST, CONF_SOURCE
from homeassistant.core import HomeAssistant

@@ -63,6 +68,29 @@ async def test_form(hass: HomeAssistant, client) -> None:
    assert config_entry.unique_id == FAKE_UUID


+async def test_form_no_model_name(hass: HomeAssistant, client) -> None:
+    """Test successful user flow without model name."""
+    client.tv_info.system = {}
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={CONF_SOURCE: config_entries.SOURCE_USER},
+        data=MOCK_USER_CONFIG,
+    )
+    await hass.async_block_till_done()
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "pairing"
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"], user_input={}
+    )
+
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+    assert result["title"] == DEFAULT_NAME
+    config_entry = result["result"]
+    assert config_entry.unique_id == FAKE_UUID
+
+
@pytest.mark.parametrize(
    ("apps", "inputs"),
    [
tests/components/zone/test_condition.py (new file, 203 lines)
@@ -0,0 +1,203 @@
"""The tests for the location condition."""

import pytest

from homeassistant.components.zone import condition as zone_condition
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConditionError
from homeassistant.helpers import condition, config_validation as cv


async def test_zone_raises(hass: HomeAssistant) -> None:
    """Test that zone raises ConditionError on errors."""
    config = {
        "condition": "zone",
        "entity_id": "device_tracker.cat",
        "zone": "zone.home",
    }
    config = cv.CONDITION_SCHEMA(config)
    config = await condition.async_validate_condition_config(hass, config)
    test = await condition.async_from_config(hass, config)

    with pytest.raises(ConditionError, match="no zone"):
        zone_condition.zone(hass, zone_ent=None, entity="sensor.any")

    with pytest.raises(ConditionError, match="unknown zone"):
        test(hass)

    hass.states.async_set(
        "zone.home",
        "zoning",
        {"name": "home", "latitude": 2.1, "longitude": 1.1, "radius": 10},
    )

    with pytest.raises(ConditionError, match="no entity"):
        zone_condition.zone(hass, zone_ent="zone.home", entity=None)

    with pytest.raises(ConditionError, match="unknown entity"):
        test(hass)

    hass.states.async_set(
        "device_tracker.cat",
        "home",
        {"friendly_name": "cat"},
    )

    with pytest.raises(ConditionError, match="latitude"):
        test(hass)

    hass.states.async_set(
        "device_tracker.cat",
        "home",
        {"friendly_name": "cat", "latitude": 2.1},
    )

    with pytest.raises(ConditionError, match="longitude"):
        test(hass)

    hass.states.async_set(
        "device_tracker.cat",
        "home",
        {"friendly_name": "cat", "latitude": 2.1, "longitude": 1.1},
    )

    # All okay, now test multiple failed conditions
    assert test(hass)

    config = {
        "condition": "zone",
        "entity_id": ["device_tracker.cat", "device_tracker.dog"],
        "zone": ["zone.home", "zone.work"],
    }
    config = cv.CONDITION_SCHEMA(config)
    config = await condition.async_validate_condition_config(hass, config)
    test = await condition.async_from_config(hass, config)

    with pytest.raises(ConditionError, match="dog"):
        test(hass)

    with pytest.raises(ConditionError, match="work"):
        test(hass)

    hass.states.async_set(
        "zone.work",
        "zoning",
        {"name": "work", "latitude": 20, "longitude": 10, "radius": 25000},
    )

    hass.states.async_set(
        "device_tracker.dog",
        "work",
        {"friendly_name": "dog", "latitude": 20.1, "longitude": 10.1},
    )

    assert test(hass)


async def test_zone_multiple_entities(hass: HomeAssistant) -> None:
    """Test with multiple entities in condition."""
    config = {
        "condition": "and",
        "conditions": [
            {
                "alias": "Zone Condition",
                "condition": "zone",
                "entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
                "zone": "zone.home",
            },
        ],
    }
    config = cv.CONDITION_SCHEMA(config)
    config = await condition.async_validate_condition_config(hass, config)
    test = await condition.async_from_config(hass, config)

    hass.states.async_set(
        "zone.home",
        "zoning",
        {"name": "home", "latitude": 2.1, "longitude": 1.1, "radius": 10},
    )

    hass.states.async_set(
        "device_tracker.person_1",
        "home",
        {"friendly_name": "person_1", "latitude": 2.1, "longitude": 1.1},
    )
    hass.states.async_set(
        "device_tracker.person_2",
        "home",
        {"friendly_name": "person_2", "latitude": 2.1, "longitude": 1.1},
    )
    assert test(hass)

    hass.states.async_set(
        "device_tracker.person_1",
        "home",
        {"friendly_name": "person_1", "latitude": 20.1, "longitude": 10.1},
    )
    hass.states.async_set(
        "device_tracker.person_2",
        "home",
        {"friendly_name": "person_2", "latitude": 2.1, "longitude": 1.1},
    )
    assert not test(hass)

    hass.states.async_set(
        "device_tracker.person_1",
        "home",
        {"friendly_name": "person_1", "latitude": 2.1, "longitude": 1.1},
    )
    hass.states.async_set(
        "device_tracker.person_2",
        "home",
        {"friendly_name": "person_2", "latitude": 20.1, "longitude": 10.1},
    )
    assert not test(hass)


async def test_multiple_zones(hass: HomeAssistant) -> None:
    """Test with multiple entities in condition."""
    config = {
        "condition": "and",
        "conditions": [
            {
                "condition": "zone",
                "entity_id": "device_tracker.person",
                "zone": ["zone.home", "zone.work"],
            },
        ],
    }
    config = cv.CONDITION_SCHEMA(config)
    config = await condition.async_validate_condition_config(hass, config)
    test = await condition.async_from_config(hass, config)

    hass.states.async_set(
        "zone.home",
        "zoning",
        {"name": "home", "latitude": 2.1, "longitude": 1.1, "radius": 10},
    )
    hass.states.async_set(
        "zone.work",
        "zoning",
        {"name": "work", "latitude": 20.1, "longitude": 10.1, "radius": 10},
    )

    hass.states.async_set(
        "device_tracker.person",
        "home",
        {"friendly_name": "person", "latitude": 2.1, "longitude": 1.1},
    )
    assert test(hass)

    hass.states.async_set(
        "device_tracker.person",
        "home",
        {"friendly_name": "person", "latitude": 20.1, "longitude": 10.1},
    )
    assert test(hass)

    hass.states.async_set(
        "device_tracker.person",
        "home",
        {"friendly_name": "person", "latitude": 50.1, "longitude": 20.1},
    )
    assert not test(hass)

@@ -1892,201 +1892,6 @@ async def test_numeric_state_using_input_number(hass: HomeAssistant) -> None:
    )


[195 lines removed: the deleted test_zone_raises, test_zone_multiple_entities and
test_multiple_zones functions are the same tests added above in
tests/components/zone/test_condition.py, except that they called condition.zone(...)
where the new file calls zone_condition.zone(...).]

@pytest.mark.usefixtures("hass")
async def test_extract_entities() -> None:
    """Test extracting entities."""

@@ -3,6 +3,7 @@
import asyncio
from collections.abc import Iterable
from copy import deepcopy
+import dataclasses
import io
from typing import Any
from unittest.mock import AsyncMock, Mock, patch

@@ -2322,3 +2323,80 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None:
    ]
    await asyncio.gather(*tasks)
    assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"])
+
+
+async def test_deprecated_service_target_selector_class(hass: HomeAssistant) -> None:
+    """Test that the deprecated ServiceTargetSelector class forwards correctly."""
+    call = ServiceCall(
+        hass,
+        "test",
+        "test",
+        {
+            "entity_id": ["light.test", "switch.test"],
+            "area_id": "kitchen",
+            "device_id": ["device1", "device2"],
+            "floor_id": "first_floor",
+            "label_id": ["label1", "label2"],
+        },
+    )
+    selector = service.ServiceTargetSelector(call)
+
+    assert selector.entity_ids == {"light.test", "switch.test"}
+    assert selector.area_ids == {"kitchen"}
+    assert selector.device_ids == {"device1", "device2"}
+    assert selector.floor_ids == {"first_floor"}
+    assert selector.label_ids == {"label1", "label2"}
+    assert selector.has_any_selector is True
+
+
+async def test_deprecated_selected_entities_class(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test that the deprecated SelectedEntities class forwards correctly."""
+    selected = service.SelectedEntities(
+        referenced={"entity.test"},
+        indirectly_referenced=set(),
+        referenced_devices=set(),
+        referenced_areas=set(),
+        missing_devices={"missing_device"},
+        missing_areas={"missing_area"},
+        missing_floors={"missing_floor"},
+        missing_labels={"missing_label"},
+    )
+
+    missing_entities = {"entity.missing"}
+    selected.log_missing(missing_entities)
+    assert (
+        "Referenced floors missing_floor, areas missing_area, "
+        "devices missing_device, entities entity.missing, "
+        "labels missing_label are missing or not currently available" in caplog.text
+    )
+
+
+async def test_deprecated_async_extract_referenced_entity_ids(
+    hass: HomeAssistant,
+) -> None:
+    """Test that the deprecated async_extract_referenced_entity_ids function forwards correctly."""
+    from homeassistant.helpers import target  # noqa: PLC0415
+
+    mock_selected = target.SelectedEntities(
+        referenced={"entity.test"},
+        indirectly_referenced={"entity.indirect"},
+    )
+    with patch(
+        "homeassistant.helpers.target.async_extract_referenced_entity_ids",
+        return_value=mock_selected,
+    ) as mock_target_func:
+        call = ServiceCall(hass, "test", "test", {"entity_id": "light.test"})
+        result = service.async_extract_referenced_entity_ids(
+            hass, call, expand_group=False
+        )
+
+    # Verify target helper was called with correct parameters
+    mock_target_func.assert_called_once()
+    args = mock_target_func.call_args
+    assert args[0][0] is hass
+    assert args[0][1].entity_ids == {"light.test"}
+    assert args[0][2] is False
+
+    assert dataclasses.asdict(result) == dataclasses.asdict(mock_selected)

tests/helpers/test_target.py (new file, 459 lines)
@@ -0,0 +1,459 @@
"""Test service helpers."""

import pytest

# TODO(abmantis): is this import needed?
# To prevent circular import when running just this file
import homeassistant.components  # noqa: F401
from homeassistant.components.group import Group
from homeassistant.const import (
    ATTR_AREA_ID,
    ATTR_DEVICE_ID,
    ATTR_ENTITY_ID,
    ATTR_FLOOR_ID,
    ATTR_LABEL_ID,
    ENTITY_MATCH_NONE,
    STATE_OFF,
    STATE_ON,
    EntityCategory,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import (
    area_registry as ar,
    device_registry as dr,
    entity_registry as er,
    target,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.setup import async_setup_component

from tests.common import (
    RegistryEntryWithDefaults,
    mock_area_registry,
    mock_device_registry,
    mock_registry,
)


@pytest.fixture
def registries_mock(hass: HomeAssistant) -> None:
    """Mock including floor and area info."""
    hass.states.async_set("light.Bowl", STATE_ON)
    hass.states.async_set("light.Ceiling", STATE_OFF)
    hass.states.async_set("light.Kitchen", STATE_OFF)

    area_in_floor = ar.AreaEntry(
        id="test-area",
        name="Test area",
        aliases={},
        floor_id="test-floor",
        icon=None,
        picture=None,
        temperature_entity_id=None,
        humidity_entity_id=None,
    )
    area_in_floor_a = ar.AreaEntry(
        id="area-a",
        name="Area A",
        aliases={},
        floor_id="floor-a",
        icon=None,
        picture=None,
        temperature_entity_id=None,
        humidity_entity_id=None,
    )
    area_with_labels = ar.AreaEntry(
        id="area-with-labels",
        name="Area with labels",
        aliases={},
        floor_id=None,
        icon=None,
        labels={"label_area"},
        picture=None,
        temperature_entity_id=None,
        humidity_entity_id=None,
    )
    mock_area_registry(
        hass,
        {
            area_in_floor.id: area_in_floor,
            area_in_floor_a.id: area_in_floor_a,
            area_with_labels.id: area_with_labels,
        },
    )

    device_in_area = dr.DeviceEntry(id="device-test-area", area_id="test-area")
    device_no_area = dr.DeviceEntry(id="device-no-area-id")
    device_diff_area = dr.DeviceEntry(id="device-diff-area", area_id="diff-area")
    device_area_a = dr.DeviceEntry(id="device-area-a-id", area_id="area-a")
    device_has_label1 = dr.DeviceEntry(id="device-has-label1-id", labels={"label1"})
    device_has_label2 = dr.DeviceEntry(id="device-has-label2-id", labels={"label2"})
    device_has_labels = dr.DeviceEntry(
        id="device-has-labels-id",
        labels={"label1", "label2"},
        area_id=area_with_labels.id,
    )

    mock_device_registry(
        hass,
        {
            device_in_area.id: device_in_area,
            device_no_area.id: device_no_area,
            device_diff_area.id: device_diff_area,
            device_area_a.id: device_area_a,
            device_has_label1.id: device_has_label1,
            device_has_label2.id: device_has_label2,
            device_has_labels.id: device_has_labels,
        },
    )

    entity_in_own_area = RegistryEntryWithDefaults(
        entity_id="light.in_own_area",
        unique_id="in-own-area-id",
        platform="test",
        area_id="own-area",
    )
    config_entity_in_own_area = RegistryEntryWithDefaults(
        entity_id="light.config_in_own_area",
        unique_id="config-in-own-area-id",
        platform="test",
        area_id="own-area",
        entity_category=EntityCategory.CONFIG,
    )
    hidden_entity_in_own_area = RegistryEntryWithDefaults(
        entity_id="light.hidden_in_own_area",
        unique_id="hidden-in-own-area-id",
        platform="test",
        area_id="own-area",
        hidden_by=er.RegistryEntryHider.USER,
    )
    entity_in_area = RegistryEntryWithDefaults(
        entity_id="light.in_area",
        unique_id="in-area-id",
        platform="test",
        device_id=device_in_area.id,
    )
    config_entity_in_area = RegistryEntryWithDefaults(
        entity_id="light.config_in_area",
        unique_id="config-in-area-id",
        platform="test",
        device_id=device_in_area.id,
        entity_category=EntityCategory.CONFIG,
    )
    hidden_entity_in_area = RegistryEntryWithDefaults(
        entity_id="light.hidden_in_area",
        unique_id="hidden-in-area-id",
        platform="test",
        device_id=device_in_area.id,
        hidden_by=er.RegistryEntryHider.USER,
    )
    entity_in_other_area = RegistryEntryWithDefaults(
        entity_id="light.in_other_area",
        unique_id="in-area-a-id",
        platform="test",
        device_id=device_in_area.id,
        area_id="other-area",
    )
    entity_assigned_to_area = RegistryEntryWithDefaults(
        entity_id="light.assigned_to_area",
        unique_id="assigned-area-id",
        platform="test",
        device_id=device_in_area.id,
        area_id="test-area",
    )
    entity_no_area = RegistryEntryWithDefaults(
        entity_id="light.no_area",
        unique_id="no-area-id",
        platform="test",
        device_id=device_no_area.id,
    )
    config_entity_no_area = RegistryEntryWithDefaults(
        entity_id="light.config_no_area",
        unique_id="config-no-area-id",
        platform="test",
        device_id=device_no_area.id,
        entity_category=EntityCategory.CONFIG,
    )
    hidden_entity_no_area = RegistryEntryWithDefaults(
        entity_id="light.hidden_no_area",
        unique_id="hidden-no-area-id",
        platform="test",
        device_id=device_no_area.id,
        hidden_by=er.RegistryEntryHider.USER,
    )
    entity_diff_area = RegistryEntryWithDefaults(
        entity_id="light.diff_area",
        unique_id="diff-area-id",
        platform="test",
        device_id=device_diff_area.id,
    )
    entity_in_area_a = RegistryEntryWithDefaults(
        entity_id="light.in_area_a",
        unique_id="in-area-a-id",
        platform="test",
        device_id=device_area_a.id,
        area_id="area-a",
    )
    entity_in_area_b = RegistryEntryWithDefaults(
        entity_id="light.in_area_b",
        unique_id="in-area-b-id",
        platform="test",
        device_id=device_area_a.id,
        area_id="area-b",
    )
    entity_with_my_label = RegistryEntryWithDefaults(
        entity_id="light.with_my_label",
        unique_id="with_my_label",
        platform="test",
        labels={"my-label"},
    )
    hidden_entity_with_my_label = RegistryEntryWithDefaults(
        entity_id="light.hidden_with_my_label",
        unique_id="hidden_with_my_label",
        platform="test",
        labels={"my-label"},
        hidden_by=er.RegistryEntryHider.USER,
    )
    config_entity_with_my_label = RegistryEntryWithDefaults(
        entity_id="light.config_with_my_label",
        unique_id="config_with_my_label",
        platform="test",
        labels={"my-label"},
        entity_category=EntityCategory.CONFIG,
    )
    entity_with_label1_from_device = RegistryEntryWithDefaults(
        entity_id="light.with_label1_from_device",
        unique_id="with_label1_from_device",
        platform="test",
        device_id=device_has_label1.id,
    )
    entity_with_label1_from_device_and_different_area = RegistryEntryWithDefaults(
        entity_id="light.with_label1_from_device_diff_area",
        unique_id="with_label1_from_device_diff_area",
        platform="test",
        device_id=device_has_label1.id,
        area_id=area_in_floor_a.id,
    )
    entity_with_label1_and_label2_from_device = RegistryEntryWithDefaults(
        entity_id="light.with_label1_and_label2_from_device",
        unique_id="with_label1_and_label2_from_device",
        platform="test",
        labels={"label1"},
        device_id=device_has_label2.id,
    )
    entity_with_labels_from_device = RegistryEntryWithDefaults(
        entity_id="light.with_labels_from_device",
        unique_id="with_labels_from_device",
        platform="test",
        device_id=device_has_labels.id,
    )
    mock_registry(
        hass,
        {
            entity_in_own_area.entity_id: entity_in_own_area,
            config_entity_in_own_area.entity_id: config_entity_in_own_area,
            hidden_entity_in_own_area.entity_id: hidden_entity_in_own_area,
            entity_in_area.entity_id: entity_in_area,
            config_entity_in_area.entity_id: config_entity_in_area,
            hidden_entity_in_area.entity_id: hidden_entity_in_area,
            entity_in_other_area.entity_id: entity_in_other_area,
            entity_assigned_to_area.entity_id: entity_assigned_to_area,
            entity_no_area.entity_id: entity_no_area,
            config_entity_no_area.entity_id: config_entity_no_area,
            hidden_entity_no_area.entity_id: hidden_entity_no_area,
            entity_diff_area.entity_id: entity_diff_area,
            entity_in_area_a.entity_id: entity_in_area_a,
            entity_in_area_b.entity_id: entity_in_area_b,
            config_entity_with_my_label.entity_id: config_entity_with_my_label,
            entity_with_label1_and_label2_from_device.entity_id: entity_with_label1_and_label2_from_device,
            entity_with_label1_from_device.entity_id: entity_with_label1_from_device,
            entity_with_label1_from_device_and_different_area.entity_id: entity_with_label1_from_device_and_different_area,
            entity_with_labels_from_device.entity_id: entity_with_labels_from_device,
            entity_with_my_label.entity_id: entity_with_my_label,
            hidden_entity_with_my_label.entity_id: hidden_entity_with_my_label,
        },
    )


@pytest.mark.parametrize(
    ("selector_config", "expand_group", "expected_selected"),
    [
        (
            {
                ATTR_ENTITY_ID: ENTITY_MATCH_NONE,
                ATTR_AREA_ID: ENTITY_MATCH_NONE,
                ATTR_FLOOR_ID: ENTITY_MATCH_NONE,
                ATTR_LABEL_ID: ENTITY_MATCH_NONE,
            },
            False,
            target.SelectedEntities(),
        ),
        (
            {ATTR_ENTITY_ID: "light.bowl"},
            False,
            target.SelectedEntities(referenced={"light.bowl"}),
        ),
        (
            {ATTR_ENTITY_ID: "group.test"},
            True,
            target.SelectedEntities(referenced={"light.ceiling", "light.kitchen"}),
        ),
        (
            {ATTR_ENTITY_ID: "group.test"},
            False,
            target.SelectedEntities(referenced={"group.test"}),
        ),
        (
            {ATTR_AREA_ID: "own-area"},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.in_own_area"},
                referenced_areas={"own-area"},
                missing_areas={"own-area"},
            ),
        ),
        (
            {ATTR_AREA_ID: "test-area"},
            False,
            target.SelectedEntities(
                indirectly_referenced={
                    "light.in_area",
                    "light.assigned_to_area",
                },
                referenced_areas={"test-area"},
                referenced_devices={"device-test-area"},
            ),
        ),
        (
            {ATTR_AREA_ID: ["test-area", "diff-area"]},
            False,
            target.SelectedEntities(
                indirectly_referenced={
                    "light.in_area",
                    "light.diff_area",
                    "light.assigned_to_area",
                },
                referenced_areas={"test-area", "diff-area"},
                referenced_devices={"device-diff-area", "device-test-area"},
                missing_areas={"diff-area"},
            ),
        ),
        (
            {ATTR_DEVICE_ID: "device-no-area-id"},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.no_area"},
                referenced_devices={"device-no-area-id"},
            ),
        ),
        (
            {ATTR_DEVICE_ID: "device-area-a-id"},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.in_area_a", "light.in_area_b"},
                referenced_devices={"device-area-a-id"},
            ),
        ),
        (
            {ATTR_FLOOR_ID: "test-floor"},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.in_area", "light.assigned_to_area"},
                referenced_devices={"device-test-area"},
                referenced_areas={"test-area"},
                missing_floors={"test-floor"},
            ),
        ),
        (
            {ATTR_FLOOR_ID: ["test-floor", "floor-a"]},
            False,
            target.SelectedEntities(
                indirectly_referenced={
                    "light.in_area",
                    "light.assigned_to_area",
                    "light.in_area_a",
                    "light.with_label1_from_device_diff_area",
                },
                referenced_devices={"device-area-a-id", "device-test-area"},
                referenced_areas={"area-a", "test-area"},
                missing_floors={"floor-a", "test-floor"},
            ),
        ),
        (
            {ATTR_LABEL_ID: "my-label"},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.with_my_label"},
                missing_labels={"my-label"},
            ),
        ),
        (
            {ATTR_LABEL_ID: "label1"},
            False,
            target.SelectedEntities(
                indirectly_referenced={
                    "light.with_label1_from_device",
                    "light.with_label1_from_device_diff_area",
                    "light.with_labels_from_device",
                    "light.with_label1_and_label2_from_device",
                },
                referenced_devices={"device-has-label1-id", "device-has-labels-id"},
                missing_labels={"label1"},
            ),
        ),
        (
            {ATTR_LABEL_ID: ["label2"]},
            False,
            target.SelectedEntities(
                indirectly_referenced={
                    "light.with_labels_from_device",
                    "light.with_label1_and_label2_from_device",
                },
                referenced_devices={"device-has-label2-id", "device-has-labels-id"},
                missing_labels={"label2"},
            ),
        ),
        (
            {ATTR_LABEL_ID: ["label_area"]},
            False,
            target.SelectedEntities(
                indirectly_referenced={"light.with_labels_from_device"},
                referenced_devices={"device-has-labels-id"},
                referenced_areas={"area-with-labels"},
                missing_labels={"label_area"},
            ),
        ),
    ],
)
@pytest.mark.usefixtures("registries_mock")
async def test_extract_referenced_entity_ids(
    hass: HomeAssistant,
    selector_config: ConfigType,
    expand_group: bool,
    expected_selected: target.SelectedEntities,
) -> None:
    """Test extract_entity_ids method."""
    hass.states.async_set("light.Bowl", STATE_ON)
    hass.states.async_set("light.Ceiling", STATE_OFF)
    hass.states.async_set("light.Kitchen", STATE_OFF)

    assert await async_setup_component(hass, "group", {})
    await hass.async_block_till_done()
    await Group.async_create_group(
        hass,
        "test",
        created_by_service=False,
        entity_ids=["light.Ceiling", "light.Kitchen"],
        icon=None,
        mode=None,
        object_id=None,
        order=None,
    )

    target_data = target.TargetSelectorData(selector_config)
    assert (
        target.async_extract_referenced_entity_ids(
            hass, target_data, expand_group=expand_group
        )
        == expected_selected
    )
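Condensed from the parametrized test above, the new target helper is used in two steps: wrap a raw selector mapping in TargetSelectorData, then resolve it into a SelectedEntities result. A minimal sketch, assuming the same registry fixture data; the wrapper function name resolve_area is illustrative and not part of the diff:

from homeassistant.const import ATTR_AREA_ID
from homeassistant.core import HomeAssistant
from homeassistant.helpers import target


def resolve_area(hass: HomeAssistant) -> target.SelectedEntities:
    """Resolve everything targeted by a single area selector."""
    target_data = target.TargetSelectorData({ATTR_AREA_ID: "test-area"})
    # Returns a SelectedEntities with referenced/indirectly referenced entity ids,
    # plus the referenced areas and devices, as asserted in the cases above.
    return target.async_extract_referenced_entity_ids(
        hass, target_data, expand_group=False
    )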
@@ -194,7 +194,6 @@ class AiohttpClientMockResponse:
        if response is None:
            response = b""

-        self.charset = "utf-8"
        self.method = method
        self._url = url
        self.status = status

@@ -264,16 +263,32 @@ class AiohttpClientMockResponse:
        """Return content."""
        return mock_stream(self.response)

+    @property
+    def charset(self):
+        """Return charset from Content-Type header."""
+        if (content_type := self._headers.get("content-type")) is None:
+            return None
+        content_type = content_type.lower()
+        if "charset=" in content_type:
+            return content_type.split("charset=")[1].split(";")[0].strip()
+        return None
+
    async def read(self):
        """Return mock response."""
        return self.response

-    async def text(self, encoding="utf-8", errors="strict"):
+    async def text(self, encoding=None, errors="strict") -> str:
        """Return mock response as a string."""
+        # Match real aiohttp behavior: encoding=None means auto-detect
+        if encoding is None:
+            encoding = self.charset or "utf-8"
        return self.response.decode(encoding, errors=errors)

-    async def json(self, encoding="utf-8", content_type=None, loads=json_loads):
+    async def json(self, encoding=None, content_type=None, loads=json_loads) -> Any:
        """Return mock response as a json."""
+        # Match real aiohttp behavior: encoding=None means auto-detect
+        if encoding is None:
+            encoding = self.charset or "utf-8"
        return loads(self.response.decode(encoding))

    def release(self):
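A usage sketch of the behavior the hunk above adds. Assumptions: this is run from a Home Assistant core checkout so the tests package is importable, the mock response constructor accepts response and headers keyword arguments, and the URL and payload values here are purely illustrative:

import asyncio

from tests.test_util.aiohttp import AiohttpClientMockResponse


async def _demo() -> None:
    resp = AiohttpClientMockResponse(
        "get",
        "http://device.local/status",
        response="señal ok".encode("iso-8859-1"),
        headers={"content-type": "text/plain; charset=iso-8859-1"},
    )
    # The new property parses the charset out of the Content-Type header ...
    assert resp.charset == "iso-8859-1"
    # ... and text()/json() fall back to it when no encoding is passed,
    # instead of always assuming utf-8 as the old default did.
    assert await resp.text() == "señal ok"


asyncio.run(_demo())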