Mirror of https://github.com/home-assistant/core.git
Synced 2025-10-31 22:49:26 +00:00

Compare commits: llm_device ... edenhaus-g
1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | b5e1869a90 |  |
@@ -278,7 +278,6 @@ homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
homeassistant.components.inels.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*
CODEOWNERS (generated, 2 changes)
@@ -741,8 +741,6 @@ build.json @home-assistant/supervisor
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
/tests/components/incomfort/ @jbouwh
/homeassistant/components/inels/ @epdevlab
/tests/components/inels/ @epdevlab
/homeassistant/components/influxdb/ @mdegat01
/tests/components/influxdb/ @mdegat01
/homeassistant/components/inkbird/ @bdraco
Dockerfile (generated, 15 changes)
@@ -15,20 +15,7 @@ ARG QEMU_CPU
# Home Assistant S6-Overlay
COPY rootfs /

# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
    case "${BUILD_ARCH}" in \
        "aarch64") go2rtc_suffix='arm64' ;; \
        "armhf") go2rtc_suffix='armv6' ;; \
        "armv7") go2rtc_suffix='arm' ;; \
        *) go2rtc_suffix=${BUILD_ARCH} ;; \
    esac \
    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version
COPY --from=ghcr.io/alexxit/go2rtc:1.9.11 /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv==0.9.5
@@ -5,6 +5,9 @@ build_from:
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
cosign:
  base_identity: https://github.com/home-assistant/docker/.*
  identity: https://github.com/home-assistant/core/.*
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "requirements": ["airos==0.6.0"]
  "requirements": ["airos==0.5.6"]
}
@@ -1,95 +0,0 @@
"""The iNELS integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from inelsmqtt import InelsMqtt
from inelsmqtt.devices import Device
from inelsmqtt.discovery import InelsDiscovery

from homeassistant.components import mqtt as ha_mqtt
from homeassistant.components.mqtt import (
    ReceiveMessage,
    async_prepare_subscribe_topics,
    async_subscribe_topics,
    async_unsubscribe_topics,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady

from .const import LOGGER, PLATFORMS

type InelsConfigEntry = ConfigEntry[InelsData]


@dataclass
class InelsData:
    """Represents the data structure for INELS runtime data."""

    mqtt: InelsMqtt
    devices: list[Device]


async def async_setup_entry(hass: HomeAssistant, entry: InelsConfigEntry) -> bool:
    """Set up iNELS from a config entry."""

    async def mqtt_publish(topic: str, payload: str, qos: int, retain: bool) -> None:
        """Publish an MQTT message using the Home Assistant MQTT client."""
        await ha_mqtt.async_publish(hass, topic, payload, qos, retain)

    async def mqtt_subscribe(
        sub_state: dict[str, Any] | None,
        topic: str,
        callback_func: Callable[[str, str], None],
    ) -> dict[str, Any]:
        """Subscribe to MQTT topics using the Home Assistant MQTT client."""

        @callback
        def mqtt_message_received(msg: ReceiveMessage) -> None:
            """Handle iNELS mqtt messages."""
            # Payload is always str at runtime since we don't set encoding=None
            # HA uses UTF-8 by default
            callback_func(msg.topic, msg.payload)  # type: ignore[arg-type]

        topics = {
            "inels_subscribe_topic": {
                "topic": topic,
                "msg_callback": mqtt_message_received,
            }
        }

        sub_state = async_prepare_subscribe_topics(hass, sub_state, topics)
        await async_subscribe_topics(hass, sub_state)
        return sub_state

    async def mqtt_unsubscribe(sub_state: dict[str, Any]) -> None:
        async_unsubscribe_topics(hass, sub_state)

    if not await ha_mqtt.async_wait_for_mqtt_client(hass):
        LOGGER.error("MQTT integration not available")
        raise ConfigEntryNotReady("MQTT integration not available")

    inels_mqtt = InelsMqtt(mqtt_publish, mqtt_subscribe, mqtt_unsubscribe)
    devices: list[Device] = await InelsDiscovery(inels_mqtt).start()

    # If no devices are discovered, continue with the setup
    if not devices:
        LOGGER.info("No devices discovered")

    entry.runtime_data = InelsData(mqtt=inels_mqtt, devices=devices)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: InelsConfigEntry) -> bool:
    """Unload a config entry."""
    await entry.runtime_data.mqtt.unsubscribe_topics()
    entry.runtime_data.mqtt.unsubscribe_listeners()

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,73 +0,0 @@
"""Config flow for iNELS."""

from __future__ import annotations

from typing import Any

from homeassistant.components import mqtt
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.service_info.mqtt import MqttServiceInfo

from .const import DOMAIN, TITLE


class INelsConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle of iNELS config flow."""

    VERSION = 1

    async def async_step_mqtt(
        self, discovery_info: MqttServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by MQTT discovery."""
        if self._async_in_progress():
            return self.async_abort(reason="already_in_progress")

        # Validate the message, abort if it fails.
        if not discovery_info.topic.endswith("/gw"):
            # Not an iNELS discovery message.
            return self.async_abort(reason="invalid_discovery_info")
        if not discovery_info.payload:
            # Empty payload, unexpected payload.
            return self.async_abort(reason="invalid_discovery_info")

        return await self.async_step_confirm_from_mqtt()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        try:
            if not mqtt.is_connected(self.hass):
                return self.async_abort(reason="mqtt_not_connected")
        except KeyError:
            return self.async_abort(reason="mqtt_not_configured")

        return await self.async_step_confirm_from_user()

    async def step_confirm(
        self, step_id: str, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup."""

        if user_input is not None:
            await self.async_set_unique_id(DOMAIN)
            return self.async_create_entry(title=TITLE, data={})

        return self.async_show_form(step_id=step_id)

    async def async_step_confirm_from_mqtt(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup from MQTT discovered."""
        return await self.step_confirm(
            step_id="confirm_from_mqtt", user_input=user_input
        )

    async def async_step_confirm_from_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup from user add integration."""
        return await self.step_confirm(
            step_id="confirm_from_user", user_input=user_input
        )
@@ -1,14 +0,0 @@
"""Constants for the iNELS integration."""

import logging

from homeassistant.const import Platform

DOMAIN = "inels"
TITLE = "iNELS"

PLATFORMS: list[Platform] = [
    Platform.SWITCH,
]

LOGGER = logging.getLogger(__package__)
@@ -1,61 +0,0 @@
"""Base class for iNELS components."""

from __future__ import annotations

from inelsmqtt.devices import Device

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity

from .const import DOMAIN


class InelsBaseEntity(Entity):
    """Base iNELS entity."""

    _attr_should_poll = False
    _attr_has_entity_name = True

    def __init__(
        self,
        device: Device,
        key: str,
        index: int,
    ) -> None:
        """Init base entity."""
        self._device = device
        self._device_id = device.unique_id
        self._attr_unique_id = self._device_id

        # The referenced variable to read from
        self._key = key
        # The index of the variable list to read from. '-1' for no index
        self._index = index

        info = device.info()
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device.unique_id)},
            manufacturer=info.manufacturer,
            model=info.model_number,
            name=device.title,
            sw_version=info.sw_version,
        )

    async def async_added_to_hass(self) -> None:
        """Add subscription of the data listener."""
        # Register the HA callback
        self._device.add_ha_callback(self._key, self._index, self._callback)
        # Subscribe to MQTT updates
        self._device.mqtt.subscribe_listener(
            self._device.state_topic, self._device.unique_id, self._device.callback
        )

    def _callback(self) -> None:
        """Get data from broker into the HA."""
        if hasattr(self, "hass"):
            self.schedule_update_ha_state()

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return self._device.is_available
@@ -1,15 +0,0 @@
{
  "entity": {
    "switch": {
      "bit": {
        "default": "mdi:power-socket-eu"
      },
      "simple_relay": {
        "default": "mdi:power-socket-eu"
      },
      "relay": {
        "default": "mdi:power-socket-eu"
      }
    }
  }
}
@@ -1,13 +0,0 @@
{
  "domain": "inels",
  "name": "iNELS",
  "codeowners": ["@epdevlab"],
  "config_flow": true,
  "dependencies": ["mqtt"],
  "documentation": "https://www.home-assistant.io/integrations/inels",
  "iot_class": "local_push",
  "mqtt": ["inels/status/#"],
  "quality_scale": "bronze",
  "requirements": ["elkoep-aio-mqtt==0.1.0b4"],
  "single_config_entry": true
}
@@ -1,118 +0,0 @@
rules:
  # Bronze
  config-flow: done
  test-before-configure: done
  unique-config-entry: done
  config-flow-test-coverage: done
  runtime-data: done
  test-before-setup:
    status: done
    comment: >
      Raise "Invalid authentication" and "MQTT Broker is offline or
      cannot be reached" otherwise, async_setup_entry returns False
  appropriate-polling:
    status: done
    comment: |
      Integration uses local_push.
  entity-unique-id:
    status: done
    comment: |
      {MAC}_{DEVICE_ID} is used, for example, 0e97f8b7d30_02E8.
  has-entity-name:
    status: done
    comment: >
      Almost all devices are multi-functional, which means that all functions
      are equally important -> keep the descriptive name (not setting _attr_name to None).
  entity-event-setup:
    status: done
    comment: |
      Subscribe in async_added_to_hass & unsubscribe from async_unload_entry.
  dependency-transparency: done
  action-setup:
    status: exempt
    comment: |
      No custom actions are defined.
  common-modules: done
  docs-high-level-description: done
  docs-installation-instructions:
    status: done
    comment: |
      A link to the wiki is provided.
  docs-removal-instructions: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  brands: done
  # Silver
  config-entry-unloading: done
  log-when-unavailable: todo
  entity-unavailable:
    status: done
    comment: |
      available property.
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  reauthentication-flow: todo
  parallel-updates:
    status: todo
    comment: |
      For all platforms, add a constant PARALLEL_UPDATES = 0.
  test-coverage: done
  integration-owner: done
  docs-installation-parameters:
    status: done
    comment: |
      A link to the wiki is provided.
  docs-configuration-parameters:
    status: exempt
    comment: >
      There is the same options flow in the integration as there is in the
      configuration.

  # Gold
  entity-translations: done
  entity-device-class: todo
  devices: done
  entity-category: todo
  entity-disabled-by-default: todo
  discovery:
    status: todo
    comment: |
      Currently blocked by a hw limitation.
  stale-devices:
    status: todo
    comment: >
      Same as discovery. The async_remove_config_entry_device function should be
      implemented at a minimum.
  diagnostics: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  dynamic-devices: todo
  discovery-update-info:
    status: todo
    comment: |
      Same as discovery.
  repair-issues: todo
  docs-use-cases: todo
  docs-supported-devices:
    status: todo
    comment: >
      In regards to this and below doc requirements, I am not sure whether the
      wiki link is acceptable.
  docs-supported-functions: todo
  docs-data-update: todo
  docs-known-limitations: todo
  docs-troubleshooting: todo
  docs-examples: todo

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: |
      The integration is not making any HTTP requests.
  strict-typing: todo
@@ -1,30 +0,0 @@
{
  "config": {
    "step": {
      "confirm_from_user": {
        "description": "iNELS devices must be connected to the same broker as the Home Assistant MQTT integration client. Continue setup?"
      },
      "confirm_from_mqtt": {
        "description": "Do you want to set up iNELS?"
      }
    },
    "abort": {
      "mqtt_not_connected": "Home Assistant MQTT integration not connected to MQTT broker.",
      "mqtt_not_configured": "Home Assistant MQTT integration not configured.",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
    }
  },
  "entity": {
    "switch": {
      "bit": {
        "name": "Bit{addr}"
      },
      "simple_relay": {
        "name": "Simple relay{index}"
      },
      "relay": {
        "name": "Relay{index}"
      }
    }
  }
}
@@ -1,137 +0,0 @@
"""iNELS switch entity."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from inelsmqtt.devices import Device
from inelsmqtt.utils.common import Bit, Relay, SimpleRelay

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import InelsConfigEntry
from .entity import InelsBaseEntity


@dataclass(frozen=True, kw_only=True)
class InelsSwitchEntityDescription(SwitchEntityDescription):
    """Class describing iNELS switch entities."""

    get_state_fn: Callable[[Device, int], Bit | SimpleRelay | Relay]
    alerts: list[str] | None = None
    placeholder_fn: Callable[[Device, int, bool], dict[str, str]]


SWITCH_TYPES = [
    InelsSwitchEntityDescription(
        key="bit",
        translation_key="bit",
        get_state_fn=lambda device, index: device.state.bit[index],
        placeholder_fn=lambda device, index, indexed: {
            "addr": f" {device.state.bit[index].addr}"
        },
    ),
    InelsSwitchEntityDescription(
        key="simple_relay",
        translation_key="simple_relay",
        get_state_fn=lambda device, index: device.state.simple_relay[index],
        placeholder_fn=lambda device, index, indexed: {
            "index": f" {index + 1}" if indexed else ""
        },
    ),
    InelsSwitchEntityDescription(
        key="relay",
        translation_key="relay",
        get_state_fn=lambda device, index: device.state.relay[index],
        alerts=["overflow"],
        placeholder_fn=lambda device, index, indexed: {
            "index": f" {index + 1}" if indexed else ""
        },
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: InelsConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Load iNELS switch."""
    entities: list[InelsSwitch] = []

    for device in entry.runtime_data.devices:
        for description in SWITCH_TYPES:
            if hasattr(device.state, description.key):
                switch_count = len(getattr(device.state, description.key))
                entities.extend(
                    InelsSwitch(
                        device=device,
                        description=description,
                        index=idx,
                        switch_count=switch_count,
                    )
                    for idx in range(switch_count)
                )

    async_add_entities(entities, False)


class InelsSwitch(InelsBaseEntity, SwitchEntity):
    """The platform class required by Home Assistant."""

    entity_description: InelsSwitchEntityDescription

    def __init__(
        self,
        device: Device,
        description: InelsSwitchEntityDescription,
        index: int = 0,
        switch_count: int = 1,
    ) -> None:
        """Initialize the switch."""
        super().__init__(device=device, key=description.key, index=index)
        self.entity_description = description
        self._switch_count = switch_count

        # Include index in unique_id for devices with multiple switches
        unique_key = f"{description.key}{index}" if index else description.key

        self._attr_unique_id = f"{self._attr_unique_id}_{unique_key}".lower()

        # Set translation placeholders
        self._attr_translation_placeholders = self.entity_description.placeholder_fn(
            self._device, self._index, self._switch_count > 1
        )

    def _check_alerts(self, current_state: Bit | SimpleRelay | Relay) -> None:
        """Check if there are active alerts and raise ServiceValidationError if found."""
        if self.entity_description.alerts and any(
            getattr(current_state, alert_key, None)
            for alert_key in self.entity_description.alerts
        ):
            raise ServiceValidationError("Cannot operate switch with active alerts")

    @property
    def is_on(self) -> bool | None:
        """Return if switch is on."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        return current_state.is_on

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the switch to turn off."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        self._check_alerts(current_state)
        current_state.is_on = False
        await self._device.set_ha_value(self._device.state)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the switch to turn on."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        self._check_alerts(current_state)
        current_state.is_on = True
        await self._device.set_ha_value(self._device.state)
@@ -18,7 +18,6 @@ from homeassistant.const import (
    CONF_SOURCE,
    CONF_UNIT_OF_MEASUREMENT,
    LIGHT_LUX,
    PERCENTAGE,
    UnitOfElectricCurrent,
    UnitOfElectricPotential,
    UnitOfSpeed,
@@ -51,7 +50,6 @@ DEVICE_CLASS_MAPPING = {
    pypck.lcn_defs.VarUnit.VOLT: SensorDeviceClass.VOLTAGE,
    pypck.lcn_defs.VarUnit.AMPERE: SensorDeviceClass.CURRENT,
    pypck.lcn_defs.VarUnit.PPM: SensorDeviceClass.CO2,
    pypck.lcn_defs.VarUnit.PERCENT: SensorDeviceClass.HUMIDITY,
}

UNIT_OF_MEASUREMENT_MAPPING = {
@@ -64,7 +62,6 @@ UNIT_OF_MEASUREMENT_MAPPING = {
    pypck.lcn_defs.VarUnit.VOLT: UnitOfElectricPotential.VOLT,
    pypck.lcn_defs.VarUnit.AMPERE: UnitOfElectricCurrent.AMPERE,
    pypck.lcn_defs.VarUnit.PPM: CONCENTRATION_PARTS_PER_MILLION,
    pypck.lcn_defs.VarUnit.PERCENT: PERCENTAGE,
}
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "legacy",
  "requirements": ["nsapi==3.1.3"]
  "requirements": ["nsapi==3.1.2"]
}
@@ -48,17 +48,19 @@ rules:
  discovery:
    status: exempt
    comment: The integration is a cloud service and thus does not support discovery.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations:
    status: todo
    comment: Add info that there are no known limitations.
  docs-supported-devices:
    status: exempt
    comment: This is a service, which doesn't integrate with any devices.
  docs-supported-functions: done
  docs-supported-functions: todo
  docs-troubleshooting:
    status: exempt
    comment: No known issues that could be resolved by the user.
  docs-use-cases: done
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: This integration has a fixed single service.
@@ -2,17 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
ALL_MATCH_REGEX,
|
||||
CONF_AREA_FILTER,
|
||||
CONF_FILTER_CORONA,
|
||||
CONF_FILTERS,
|
||||
CONF_HEADLINE_FILTER,
|
||||
NO_MATCH_REGEX,
|
||||
)
|
||||
@@ -23,6 +19,20 @@ PLATFORMS: list[str] = [Platform.BINARY_SENSOR]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
|
||||
"""Set up platform from a ConfigEntry."""
|
||||
if CONF_HEADLINE_FILTER not in entry.data:
|
||||
filter_regex = NO_MATCH_REGEX
|
||||
|
||||
if entry.data[CONF_FILTER_CORONA]:
|
||||
filter_regex = ".*corona.*"
|
||||
|
||||
new_data = {**entry.data, CONF_HEADLINE_FILTER: filter_regex}
|
||||
new_data.pop(CONF_FILTER_CORONA, None)
|
||||
hass.config_entries.async_update_entry(entry, data=new_data)
|
||||
|
||||
if CONF_AREA_FILTER not in entry.data:
|
||||
new_data = {**entry.data, CONF_AREA_FILTER: ALL_MATCH_REGEX}
|
||||
hass.config_entries.async_update_entry(entry, data=new_data)
|
||||
|
||||
coordinator = NINADataUpdateCoordinator(hass, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
@@ -37,52 +47,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
|
||||
"""Migrate the config to the new format."""
|
||||
|
||||
version = entry.version
|
||||
minor_version = entry.minor_version
|
||||
|
||||
_LOGGER.debug("Migrating from version %s.%s", version, minor_version)
|
||||
if entry.version > 1:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
new_data: dict[str, Any] = {**entry.data, CONF_FILTERS: {}}
|
||||
|
||||
if version == 1 and minor_version == 1:
|
||||
if CONF_HEADLINE_FILTER not in entry.data:
|
||||
filter_regex = NO_MATCH_REGEX
|
||||
|
||||
if entry.data.get(CONF_FILTER_CORONA, None):
|
||||
filter_regex = ".*corona.*"
|
||||
|
||||
new_data[CONF_HEADLINE_FILTER] = filter_regex
|
||||
new_data.pop(CONF_FILTER_CORONA, None)
|
||||
|
||||
if CONF_AREA_FILTER not in entry.data:
|
||||
new_data[CONF_AREA_FILTER] = ALL_MATCH_REGEX
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data=new_data,
|
||||
minor_version=2,
|
||||
)
|
||||
minor_version = 2
|
||||
|
||||
if version == 1 and minor_version == 2:
|
||||
new_data[CONF_FILTERS][CONF_HEADLINE_FILTER] = entry.data[CONF_HEADLINE_FILTER]
|
||||
new_data.pop(CONF_HEADLINE_FILTER, None)
|
||||
|
||||
new_data[CONF_FILTERS][CONF_AREA_FILTER] = entry.data[CONF_AREA_FILTER]
|
||||
new_data.pop(CONF_AREA_FILTER, None)
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data=new_data,
|
||||
minor_version=3,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -14,16 +14,13 @@ from homeassistant.config_entries import (
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
ALL_MATCH_REGEX,
|
||||
CONF_AREA_FILTER,
|
||||
CONF_FILTERS,
|
||||
CONF_HEADLINE_FILTER,
|
||||
CONF_MESSAGE_SLOTS,
|
||||
CONF_REGIONS,
|
||||
@@ -90,7 +87,6 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for NINA."""
|
||||
|
||||
VERSION: int = 1
|
||||
MINOR_VERSION: int = 3
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize."""
|
||||
@@ -130,8 +126,8 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if group_input := user_input.get(group):
|
||||
user_input[CONF_REGIONS] += group_input
|
||||
|
||||
if not user_input[CONF_FILTERS][CONF_HEADLINE_FILTER]:
|
||||
user_input[CONF_FILTERS][CONF_HEADLINE_FILTER] = NO_MATCH_REGEX
|
||||
if not user_input[CONF_HEADLINE_FILTER]:
|
||||
user_input[CONF_HEADLINE_FILTER] = NO_MATCH_REGEX
|
||||
|
||||
if user_input[CONF_REGIONS]:
|
||||
return self.async_create_entry(
|
||||
@@ -154,18 +150,7 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(CONF_MESSAGE_SLOTS, default=5): vol.All(
|
||||
int, vol.Range(min=1, max=20)
|
||||
),
|
||||
vol.Required(CONF_FILTERS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER, default=NO_MATCH_REGEX
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER, default=ALL_MATCH_REGEX
|
||||
): cv.string,
|
||||
}
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_HEADLINE_FILTER, default=""): cv.string,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
@@ -274,20 +259,14 @@ class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
CONF_MESSAGE_SLOTS,
|
||||
default=self.data[CONF_MESSAGE_SLOTS],
|
||||
): vol.All(int, vol.Range(min=1, max=20)),
|
||||
vol.Required(CONF_FILTERS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER,
|
||||
default=self.data[CONF_FILTERS][CONF_HEADLINE_FILTER],
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER,
|
||||
default=self.data[CONF_FILTERS][CONF_AREA_FILTER],
|
||||
): cv.string,
|
||||
}
|
||||
)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER,
|
||||
default=self.data[CONF_HEADLINE_FILTER],
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER,
|
||||
default=self.data[CONF_AREA_FILTER],
|
||||
): cv.string,
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -17,7 +17,6 @@ ALL_MATCH_REGEX: str = ".*"

CONF_REGIONS: str = "regions"
CONF_MESSAGE_SLOTS: str = "slots"
CONF_FILTERS: str = "filters"
CONF_FILTER_CORONA: str = "corona_filter"  # deprecated
CONF_HEADLINE_FILTER: str = "headline_filter"
CONF_AREA_FILTER: str = "area_filter"
@@ -17,7 +17,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import (
    _LOGGER,
    CONF_AREA_FILTER,
    CONF_FILTERS,
    CONF_HEADLINE_FILTER,
    CONF_REGIONS,
    DOMAIN,
@@ -59,10 +58,8 @@ class NINADataUpdateCoordinator(
    ) -> None:
        """Initialize."""
        self._nina: Nina = Nina(async_get_clientsession(hass))
        self.headline_filter: str = config_entry.data[CONF_FILTERS][
            CONF_HEADLINE_FILTER
        ]
        self.area_filter: str = config_entry.data[CONF_FILTERS][CONF_AREA_FILTER]
        self.headline_filter: str = config_entry.data[CONF_HEADLINE_FILTER]
        self.area_filter: str = config_entry.data[CONF_AREA_FILTER]

        regions: dict[str, str] = config_entry.data[CONF_REGIONS]
        for region in regions:
@@ -10,21 +10,8 @@
|
||||
"_m_to_q": "City/county (M-Q)",
|
||||
"_r_to_u": "City/county (R-U)",
|
||||
"_v_to_z": "City/county (V-Z)",
|
||||
"slots": "Maximum warnings per city/county"
|
||||
},
|
||||
"sections": {
|
||||
"filters": {
|
||||
"name": "Filters",
|
||||
"description": "Filter warnings based on their attributes",
|
||||
"data": {
|
||||
"headline_filter": "Headline blocklist",
|
||||
"area_filter": "Affected area filter"
|
||||
},
|
||||
"data_description": {
|
||||
"headline_filter": "Blacklist regex to filter warning based on headlines",
|
||||
"area_filter": "Whitelist regex to filter warnings based on affected areas"
|
||||
}
|
||||
}
|
||||
"slots": "Maximum warnings per city/county",
|
||||
"headline_filter": "Headline blocklist"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -45,21 +32,9 @@
|
||||
"_m_to_q": "[%key:component::nina::config::step::user::data::_m_to_q%]",
|
||||
"_r_to_u": "[%key:component::nina::config::step::user::data::_r_to_u%]",
|
||||
"_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
|
||||
"slots": "[%key:component::nina::config::step::user::data::slots%]"
|
||||
},
|
||||
"sections": {
|
||||
"filters": {
|
||||
"name": "[%key:component::nina::config::step::user::sections::filters::name%]",
|
||||
"description": "[%key:component::nina::config::step::user::sections::filters::description%]",
|
||||
"data": {
|
||||
"headline_filter": "[%key:component::nina::config::step::user::sections::filters::data::headline_filter%]",
|
||||
"area_filter": "[%key:component::nina::config::step::user::sections::filters::data::area_filter%]"
|
||||
},
|
||||
"data_description": {
|
||||
"headline_filter": "[%key:component::nina::config::step::user::sections::filters::data_description::headline_filter%]",
|
||||
"area_filter": "[%key:component::nina::config::step::user::sections::filters::data_description::area_filter%]"
|
||||
}
|
||||
}
|
||||
"slots": "[%key:component::nina::config::step::user::data::slots%]",
|
||||
"headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
|
||||
"area_filter": "Affected area filter"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -30,7 +30,6 @@ from homeassistant.const import (
    UnitOfEnergy,
    UnitOfFrequency,
    UnitOfPower,
    UnitOfPrecipitationDepth,
    UnitOfPressure,
    UnitOfTemperature,
    UnitOfTime,
@@ -1492,27 +1491,6 @@ RPC_SENSORS: Final = {
        device_class=SensorDeviceClass.ENUM,
        options=["dark", "twilight", "bright"],
    ),
    "number_average_temperature": RpcSensorDescription(
        key="number",
        sub_key="value",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        suggested_display_precision=1,
        device_class=SensorDeviceClass.TEMPERATURE,
        role="average_temperature",
        removal_condition=lambda config, _s, _k: not config.get("service:0", {}).get(
            "weather_api", False
        ),
    ),
    "number_last_precipitation": RpcSensorDescription(
        key="number",
        sub_key="value",
        native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
        device_class=SensorDeviceClass.PRECIPITATION,
        role="last_precipitation",
        removal_condition=lambda config, _s, _k: not config.get("service:0", {}).get(
            "weather_api", False
        ),
    ),
    "number_current_humidity": RpcSensorDescription(
        key="number",
        sub_key="value",
@@ -15,7 +15,6 @@ from . import SqueezeboxConfigEntry
from .const import SIGNAL_PLAYER_DISCOVERED
from .coordinator import SqueezeBoxPlayerUpdateCoordinator
from .entity import SqueezeboxEntity
from .util import safe_library_call

_LOGGER = logging.getLogger(__name__)

@@ -158,10 +157,4 @@ class SqueezeboxButtonEntity(SqueezeboxEntity, ButtonEntity):

    async def async_press(self) -> None:
        """Execute the button action."""
        await safe_library_call(
            self._player.async_query,
            "button",
            self.entity_description.press_action,
            translation_key="press_failed",
            translation_placeholders={"action": self.entity_description.press_action},
        )
        await self._player.async_query("button", self.entity_description.press_action)
@@ -70,7 +70,6 @@ from .const import (
|
||||
)
|
||||
from .coordinator import SqueezeBoxPlayerUpdateCoordinator
|
||||
from .entity import SqueezeboxEntity
|
||||
from .util import safe_library_call
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import SqueezeboxConfigEntry
|
||||
@@ -434,98 +433,58 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Turn off media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_set_power, False, translation_key="turn_off_failed"
|
||||
)
|
||||
await self._player.async_set_power(False)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
|
||||
"""Set volume level, range 0..1."""
|
||||
volume_percent = str(round(volume * 100))
|
||||
await safe_library_call(
|
||||
self._player.async_set_volume,
|
||||
volume_percent,
|
||||
translation_key="set_volume_failed",
|
||||
translation_placeholders={"volume": volume_percent},
|
||||
)
|
||||
await self._player.async_set_volume(volume_percent)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_mute_volume(self, mute: bool) -> None:
|
||||
"""Mute (true) or unmute (false) media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_set_muting,
|
||||
mute,
|
||||
translation_key="set_mute_failed",
|
||||
)
|
||||
await self._player.async_set_muting(mute)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_stop(self) -> None:
|
||||
"""Send stop command to media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_stop,
|
||||
translation_key="stop_failed",
|
||||
)
|
||||
await self._player.async_stop()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_play_pause(self) -> None:
|
||||
"""Send pause/play toggle command to media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_toggle_pause,
|
||||
translation_key="play_pause_failed",
|
||||
)
|
||||
"""Send pause command to media player."""
|
||||
await self._player.async_toggle_pause()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_play(self) -> None:
|
||||
"""Send play command to media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_play,
|
||||
translation_key="play_failed",
|
||||
)
|
||||
await self._player.async_play()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_pause(self) -> None:
|
||||
"""Send pause command to media player."""
|
||||
await safe_library_call(
|
||||
self._player.async_pause,
|
||||
translation_key="pause_failed",
|
||||
)
|
||||
await self._player.async_pause()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_next_track(self) -> None:
|
||||
"""Send next track command."""
|
||||
await safe_library_call(
|
||||
self._player.async_index,
|
||||
"+1",
|
||||
translation_key="next_track_failed",
|
||||
)
|
||||
await self._player.async_index("+1")
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_previous_track(self) -> None:
|
||||
"""Send previous track command."""
|
||||
await safe_library_call(
|
||||
self._player.async_index,
|
||||
"-1",
|
||||
translation_key="previous_track_failed",
|
||||
)
|
||||
"""Send next track command."""
|
||||
await self._player.async_index("-1")
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_media_seek(self, position: float) -> None:
|
||||
"""Send seek command."""
|
||||
await safe_library_call(
|
||||
self._player.async_time,
|
||||
position,
|
||||
translation_key="seek_failed",
|
||||
translation_placeholders={"position": position},
|
||||
)
|
||||
await self._player.async_time(position)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Turn the media player on."""
|
||||
await safe_library_call(
|
||||
self._player.async_set_power,
|
||||
True,
|
||||
translation_key="turn_on_failed",
|
||||
)
|
||||
await self._player.async_set_power(True)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_play_media(
|
||||
@@ -564,7 +523,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_announce_media_type",
|
||||
translation_placeholders={"media_type": str(media_type)},
|
||||
translation_placeholders={
|
||||
"media_type": str(media_type),
|
||||
},
|
||||
)
|
||||
|
||||
extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
|
||||
@@ -575,7 +536,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_announce_volume",
|
||||
translation_placeholders={"announce_volume": ATTR_ANNOUNCE_VOLUME},
|
||||
translation_placeholders={
|
||||
"announce_volume": ATTR_ANNOUNCE_VOLUME,
|
||||
},
|
||||
) from None
|
||||
else:
|
||||
self._player.set_announce_volume(announce_volume)
|
||||
@@ -587,7 +550,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_announce_timeout",
|
||||
translation_placeholders={
|
||||
"announce_timeout": ATTR_ANNOUNCE_TIMEOUT
|
||||
"announce_timeout": ATTR_ANNOUNCE_TIMEOUT,
|
||||
},
|
||||
) from None
|
||||
else:
|
||||
@@ -595,19 +558,15 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
|
||||
if media_type in MediaType.MUSIC:
|
||||
if not media_id.startswith(SQUEEZEBOX_SOURCE_STRINGS):
|
||||
# do not process special squeezebox "source" media ids
|
||||
media_id = async_process_play_media_url(self.hass, media_id)
|
||||
|
||||
await safe_library_call(
|
||||
self._player.async_load_url,
|
||||
media_id,
|
||||
cmd,
|
||||
translation_key="load_url_failed",
|
||||
translation_placeholders={"media_id": media_id, "cmd": cmd},
|
||||
)
|
||||
await self._player.async_load_url(media_id, cmd)
|
||||
return
|
||||
|
||||
if media_type == MediaType.PLAYLIST:
|
||||
try:
|
||||
# a saved playlist by number
|
||||
payload = {
|
||||
"search_id": media_id,
|
||||
"search_type": MediaType.PLAYLIST,
|
||||
@@ -616,6 +575,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
self._player, payload, self.browse_limit, self._browse_data
|
||||
)
|
||||
except BrowseError:
|
||||
# a list of urls
|
||||
content = json.loads(media_id)
|
||||
playlist = content["urls"]
|
||||
index = content["index"]
|
||||
@@ -627,19 +587,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
playlist = await generate_playlist(
|
||||
self._player, payload, self.browse_limit, self._browse_data
|
||||
)
|
||||
|
||||
_LOGGER.debug("Generated playlist: %s", playlist)
|
||||
|
||||
await safe_library_call(
|
||||
self._player.async_load_playlist,
|
||||
playlist,
|
||||
cmd,
|
||||
translation_key="load_playlist_failed",
|
||||
translation_placeholders={"cmd": cmd},
|
||||
)
|
||||
|
||||
await self._player.async_load_playlist(playlist, cmd)
|
||||
if index is not None:
|
||||
await self._player.async_index(index)
|
||||
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_search_media(
|
||||
@@ -719,29 +672,18 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
else:
|
||||
repeat_mode = "none"
|
||||
|
||||
await safe_library_call(
|
||||
self._player.async_set_repeat,
|
||||
repeat_mode,
|
||||
translation_key="set_repeat_failed",
|
||||
)
|
||||
await self._player.async_set_repeat(repeat_mode)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_set_shuffle(self, shuffle: bool) -> None:
|
||||
"""Enable or disable shuffle mode."""
|
||||
"""Enable/disable shuffle mode."""
|
||||
shuffle_mode = "song" if shuffle else "none"
|
||||
await safe_library_call(
|
||||
self._player.async_set_shuffle,
|
||||
shuffle_mode,
|
||||
translation_key="set_shuffle_failed",
|
||||
)
|
||||
await self._player.async_set_shuffle(shuffle_mode)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_clear_playlist(self) -> None:
|
||||
"""Send the media player the command to clear the playlist."""
|
||||
await safe_library_call(
|
||||
self._player.async_clear_playlist,
|
||||
translation_key="clear_playlist_failed",
|
||||
)
|
||||
"""Send the media player the command for clear playlist."""
|
||||
await self._player.async_clear_playlist()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_call_method(
|
||||
@@ -750,18 +692,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
"""Call Squeezebox JSON/RPC method.
|
||||
|
||||
Additional parameters are added to the command to form the list of
|
||||
positional parameters (p0, p1..., pN) passed to JSON/RPC server.
|
||||
positional parameters (p0, p1..., pN) passed to JSON/RPC server.
|
||||
"""
|
||||
all_params = [command]
|
||||
if parameters:
|
||||
all_params.extend(parameters)
|
||||
|
||||
await safe_library_call(
|
||||
self._player.async_query,
|
||||
*all_params,
|
||||
translation_key="call_method_failed",
|
||||
translation_placeholders={"command": command},
|
||||
)
|
||||
await self._player.async_query(*all_params)
|
||||
|
||||
async def async_call_query(
|
||||
self, command: str, parameters: list[str] | None = None
|
||||
@@ -769,18 +705,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
"""Call Squeezebox JSON/RPC method where we care about the result.
|
||||
|
||||
Additional parameters are added to the command to form the list of
|
||||
positional parameters (p0, p1..., pN) passed to JSON/RPC server.
|
||||
positional parameters (p0, p1..., pN) passed to JSON/RPC server.
|
||||
"""
|
||||
all_params = [command]
|
||||
if parameters:
|
||||
all_params.extend(parameters)
|
||||
|
||||
self._query_result = await safe_library_call(
|
||||
self._player.async_query,
|
||||
*all_params,
|
||||
translation_key="call_query_failed",
|
||||
translation_placeholders={"command": command},
|
||||
)
|
||||
self._query_result = await self._player.async_query(*all_params)
|
||||
_LOGGER.debug("call_query got result %s", self._query_result)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -814,10 +744,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Unsync this Squeezebox player."""
|
||||
await safe_library_call(
|
||||
self._player.async_unsync,
|
||||
translation_key="unjoin_failed",
|
||||
)
|
||||
await self._player.async_unsync()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
def get_synthetic_id_and_cache_url(self, url: str) -> str:
|
||||
@@ -881,19 +808,14 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
image_url = self._synthetic_media_browser_thumbnail_items.get(
|
||||
media_image_id
|
||||
)
|
||||
|
||||
if image_url is None:
|
||||
_LOGGER.debug("Synthetic ID %s not found in cache", media_image_id)
|
||||
return (None, None)
|
||||
else:
|
||||
image_url = await safe_library_call(
|
||||
self._player.generate_image_url_from_track_id,
|
||||
media_image_id,
|
||||
translation_key="generate_image_url_failed",
|
||||
translation_placeholders={"track_id": media_image_id},
|
||||
)
|
||||
image_url = self._player.generate_image_url_from_track_id(media_image_id)
|
||||
|
||||
result = await self._async_fetch_image(image_url)
|
||||
if result == (None, None):
|
||||
_LOGGER.debug("Error retrieving proxied album art from %s", image_url)
|
||||
|
||||
return result
|
||||
|
||||
@@ -207,69 +207,6 @@
|
||||
},
|
||||
"invalid_search_media_content_type": {
|
||||
"message": "If specified, Media content type must be one of {media_content_type}"
|
||||
},
|
||||
"turn_on_failed": {
|
||||
"message": "Failed to turn on the player."
|
||||
},
|
||||
"turn_off_failed": {
|
||||
"message": "Failed to turn off the player."
|
||||
},
|
||||
"set_shuffle_failed": {
|
||||
"message": "Failed to set shuffle mode."
|
||||
},
|
||||
"set_volume_failed": {
|
||||
"message": "Failed to set volume to {volume}%."
|
||||
},
|
||||
"set_mute_failed": {
|
||||
"message": "Failed to mute/unmute the player."
|
||||
},
|
||||
"stop_failed": {
|
||||
"message": "Failed to stop playback."
|
||||
},
|
||||
"play_pause_failed": {
|
||||
"message": "Failed to toggle play/pause."
|
||||
},
|
||||
"play_failed": {
|
||||
"message": "Failed to start playback."
|
||||
},
|
||||
"pause_failed": {
|
||||
"message": "Failed to pause playback."
|
||||
},
|
||||
"next_track_failed": {
|
||||
"message": "Failed to skip to the next track."
|
||||
},
|
||||
"previous_track_failed": {
|
||||
"message": "Failed to return to the previous track."
|
||||
},
|
||||
"seek_failed": {
|
||||
"message": "Failed to seek to position {position} seconds."
|
||||
},
|
||||
"set_repeat_failed": {
|
||||
"message": "Failed to set repeat mode."
|
||||
},
|
||||
"clear_playlist_failed": {
|
||||
"message": "Failed to clear the playlist."
|
||||
},
|
||||
"call_method_failed": {
|
||||
"message": "Failed to call method {command}."
|
||||
},
|
||||
"call_query_failed": {
|
||||
"message": "Failed to query method {command}."
|
||||
},
|
||||
"unjoin_failed": {
|
||||
"message": "Failed to unsync the player."
|
||||
},
|
||||
"press_failed": {
|
||||
"message": "Failed to execute button action {action}."
|
||||
},
|
||||
"load_url_failed": {
|
||||
"message": "Failed to load media URL {media_id} with command {cmd}."
|
||||
},
|
||||
"load_playlist_failed": {
|
||||
"message": "Failed to load playlist with command {cmd}."
|
||||
},
|
||||
"generate_image_url_failed": {
|
||||
"message": "Failed to generate image URL for track ID {track_id}."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,33 +0,0 @@
"""Utility functions for Squeezebox integration."""

from __future__ import annotations

from collections.abc import Awaitable, Callable
from typing import Any

from homeassistant.exceptions import HomeAssistantError

from .const import DOMAIN


async def safe_library_call(
    method: Callable[..., Awaitable[Any]],
    *args: Any,
    translation_key: str,
    translation_placeholders: dict[str, Any] | None = None,
    **kwargs: Any,
) -> Any:
    """Call a player method safely and raise HomeAssistantError on failure."""
    try:
        result = await method(*args, **kwargs)
    except ValueError:
        result = None

    if result is False or result is None:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key=translation_key,
            translation_placeholders=translation_placeholders,
        )

    return result
@@ -200,13 +200,7 @@ class TemplateEntity(AbstractTemplateEntity):
                """Name of this state."""
                return "<None>"

        # Render the current variables and add a dummy this variable to them.
        variables = (
            self._run_variables
            if isinstance(self._run_variables, dict)
            else self._run_variables.async_render(self.hass, {})
        )
        variables = {"this": DummyState(), **variables}
        variables = {"this": DummyState()}

        # Try to render the name as it can influence the entity ID
        self._attr_name = None
@@ -29,6 +29,7 @@ class ViCareEntity(Entity):
        gateway_serial = device_config.getConfig().serial
        device_id = device_config.getId()
        model = device_config.getModel().replace("_", " ")
        via_device_identifier: tuple[str, str] = ("", "")

        identifier = (
            f"{gateway_serial}_{device_serial.replace('-', '_')}"
@@ -36,6 +37,11 @@ class ViCareEntity(Entity):
            else f"{gateway_serial}_{device_id}"
        )

        if device_serial is not None and device_serial.startswith("zigbee-"):
            parts = device_serial.split("-")
            if len(parts) == 3:  # expect format zigbee-<zigbee-ieee>-<channel-id>
                via_device_identifier = (DOMAIN, f"{gateway_serial}_zigbee_{parts[1]}")

        self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = (
            component if component else device
        )
@@ -50,13 +56,5 @@ class ViCareEntity(Entity):
            manufacturer="Viessmann",
            model=model,
            configuration_url=VIESSMANN_DEVELOPER_PORTAL,
            via_device=via_device_identifier,
        )

        if device_serial and device_serial.startswith("zigbee-"):
            parts = device_serial.split("-", 2)
            if len(parts) == 3:
                _, zigbee_ieee, _ = parts
                self._attr_device_info["via_device"] = (
                    DOMAIN,
                    f"{gateway_serial}_zigbee_{zigbee_ieee}",
                )
@@ -89,18 +89,6 @@ class ViCareSensorEntityDescription(SensorEntityDescription, ViCareRequiredKeysM
    unit_getter: Callable[[PyViCareDevice], str | None] | None = None


SUPPLY_TEMPERATURE_SENSOR: ViCareSensorEntityDescription = (
    ViCareSensorEntityDescription(
        key="supply_temperature",
        translation_key="supply_temperature",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_getter=lambda api: api.getSupplyTemperature(),
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
    )
)


GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
    ViCareSensorEntityDescription(
        key="outside_temperature",
@@ -990,11 +978,17 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_getter=lambda api: api.getHydraulicSeparatorTemperature(),
    ),
    SUPPLY_TEMPERATURE_SENSOR,
)

CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
    SUPPLY_TEMPERATURE_SENSOR,
    ViCareSensorEntityDescription(
        key="supply_temperature",
        translation_key="supply_temperature",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_getter=lambda api: api.getSupplyTemperature(),
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
    ),
)

BURNER_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
@@ -1,21 +0,0 @@
"""Victron Remote Monitoring energy platform."""

from __future__ import annotations

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant


async def async_get_solar_forecast(
    hass: HomeAssistant, config_entry_id: str
) -> dict[str, dict[str, float | int]] | None:
    """Get solar forecast for a config entry ID."""
    if (
        entry := hass.config_entries.async_get_entry(config_entry_id)
    ) is None or entry.state != ConfigEntryState.LOADED:
        return None
    data = entry.runtime_data.data.solar
    if data is None:
        return None

    return {"wh_hours": data.get_dict_isoformat}
@@ -74,7 +74,6 @@ async def authenticate(
        not appliances_manager.aircons
        and not appliances_manager.washers
        and not appliances_manager.dryers
        and not appliances_manager.ovens
    ):
        return "no_appliances"
@@ -3,7 +3,6 @@
import logging

from whirlpool.appliance import Appliance
from whirlpool.oven import Cavity as OvenCavity, Oven

from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
@@ -65,31 +64,3 @@ class WhirlpoolEntity(Entity):
                translation_domain=DOMAIN,
                translation_key="request_failed",
            )


class WhirlpoolOvenEntity(WhirlpoolEntity):
    """Base class for Whirlpool oven entities."""

    _appliance: Oven

    def __init__(
        self,
        appliance: Oven,
        cavity: OvenCavity,
        translation_key_base: str | None,
        unique_id_suffix: str = "",
    ) -> None:
        """Initialize the entity."""
        self.cavity = cavity
        cavity_suffix = ""
        if appliance.get_oven_cavity_exists(
            OvenCavity.Upper
        ) and appliance.get_oven_cavity_exists(OvenCavity.Lower):
            if cavity == OvenCavity.Upper:
                cavity_suffix = "_upper"
            elif cavity == OvenCavity.Lower:
                cavity_suffix = "_lower"
        super().__init__(
            appliance, unique_id_suffix=f"{unique_id_suffix}{cavity_suffix}"
        )
        self._attr_translation_key = f"{translation_key_base}{cavity_suffix}"
@@ -6,15 +6,6 @@
      },
      "dryer_state": {
        "default": "mdi:tumble-dryer"
      },
      "oven_state": {
        "default": "mdi:stove"
      },
      "oven_state_upper": {
        "default": "mdi:stove"
      },
      "oven_state_lower": {
        "default": "mdi:stove"
      }
    }
  }
}
@@ -8,12 +8,6 @@ from typing import override

from whirlpool.appliance import Appliance
from whirlpool.dryer import Dryer, MachineState as DryerMachineState
from whirlpool.oven import (
    Cavity as OvenCavity,
    CavityState as OvenCavityState,
    CookMode,
    Oven,
)
from whirlpool.washer import MachineState as WasherMachineState, Washer

from homeassistant.components.sensor import (
@@ -21,16 +15,14 @@ from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import utcnow

from . import WhirlpoolConfigEntry
from .entity import WhirlpoolEntity, WhirlpoolOvenEntity
from .entity import WhirlpoolEntity

PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(minutes=5)
@@ -96,23 +88,6 @@ STATE_CYCLE_SOAKING = "cycle_soaking"
STATE_CYCLE_SPINNING = "cycle_spinning"
STATE_CYCLE_WASHING = "cycle_washing"

OVEN_CAVITY_STATE = {
    OvenCavityState.Standby: "standby",
    OvenCavityState.Preheating: "preheating",
    OvenCavityState.Cooking: "cooking",
}

OVEN_COOK_MODE = {
    CookMode.Standby: "standby",
    CookMode.Bake: "bake",
    CookMode.ConvectBake: "convection_bake",
    CookMode.Broil: "broil",
    CookMode.ConvectBroil: "convection_broil",
    CookMode.ConvectRoast: "convection_roast",
    CookMode.KeepWarm: "keep_warm",
    CookMode.AirFry: "air_fry",
}


def washer_state(washer: Washer) -> str | None:
    """Determine correct states for a washer."""
@@ -208,59 +183,6 @@ WASHER_DRYER_TIME_SENSORS: tuple[SensorEntityDescription] = (
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class WhirlpoolOvenCavitySensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes a Whirlpool oven cavity sensor entity."""
|
||||
|
||||
value_fn: Callable[[Oven, OvenCavity], str | int | float | None]
|
||||
|
||||
|
||||
OVEN_CAVITY_SENSORS: tuple[WhirlpoolOvenCavitySensorEntityDescription, ...] = (
|
||||
WhirlpoolOvenCavitySensorEntityDescription(
|
||||
key="oven_state",
|
||||
translation_key="oven_state",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=list(OVEN_CAVITY_STATE.values()),
|
||||
value_fn=lambda oven, cavity: (
|
||||
OVEN_CAVITY_STATE.get(state)
|
||||
if (state := oven.get_cavity_state(cavity)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
WhirlpoolOvenCavitySensorEntityDescription(
|
||||
key="oven_cook_mode",
|
||||
translation_key="oven_cook_mode",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=list(OVEN_COOK_MODE.values()),
|
||||
value_fn=lambda oven, cavity: (
|
||||
OVEN_COOK_MODE.get(cook_mode)
|
||||
if (cook_mode := oven.get_cook_mode(cavity)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
WhirlpoolOvenCavitySensorEntityDescription(
|
||||
key="oven_current_temperature",
|
||||
translation_key="oven_current_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda oven, cavity: (
|
||||
temp if (temp := oven.get_temp(cavity)) != 0 else None
|
||||
),
|
||||
),
|
||||
WhirlpoolOvenCavitySensorEntityDescription(
|
||||
key="oven_target_temperature",
|
||||
translation_key="oven_target_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda oven, cavity: (
|
||||
temp if (temp := oven.get_target_temp(cavity)) != 0 else None
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: WhirlpoolConfigEntry,
|
||||
@@ -293,28 +215,12 @@ async def async_setup_entry(
|
||||
for description in WASHER_DRYER_TIME_SENSORS
|
||||
]
|
||||
|
||||
oven_upper_cavity_sensors = [
|
||||
WhirlpoolOvenCavitySensor(oven, OvenCavity.Upper, description)
|
||||
for oven in appliances_manager.ovens
|
||||
if oven.get_oven_cavity_exists(OvenCavity.Upper)
|
||||
for description in OVEN_CAVITY_SENSORS
|
||||
]
|
||||
|
||||
oven_lower_cavity_sensors = [
|
||||
WhirlpoolOvenCavitySensor(oven, OvenCavity.Lower, description)
|
||||
for oven in appliances_manager.ovens
|
||||
if oven.get_oven_cavity_exists(OvenCavity.Lower)
|
||||
for description in OVEN_CAVITY_SENSORS
|
||||
]
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
*washer_sensors,
|
||||
*washer_time_sensors,
|
||||
*dryer_sensors,
|
||||
*dryer_time_sensors,
|
||||
*oven_upper_cavity_sensors,
|
||||
*oven_lower_cavity_sensors,
|
||||
]
|
||||
)
|
||||
|
||||
@@ -427,26 +333,3 @@ class DryerTimeSensor(WasherDryerTimeSensorBase):
|
||||
def _is_machine_state_running(self) -> bool:
|
||||
"""Return true if the machine is in a running state."""
|
||||
return self._appliance.get_machine_state() is DryerMachineState.RunningMainCycle
|
||||
|
||||
|
||||
class WhirlpoolOvenCavitySensor(WhirlpoolOvenEntity, SensorEntity):
|
||||
"""A class for Whirlpool oven cavity sensors."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
oven: Oven,
|
||||
cavity: OvenCavity,
|
||||
description: WhirlpoolOvenCavitySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the oven cavity sensor."""
|
||||
super().__init__(
|
||||
oven, cavity, description.translation_key, f"-{description.key}"
|
||||
)
|
||||
self.entity_description: WhirlpoolOvenCavitySensorEntityDescription = (
|
||||
description
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return native value of sensor."""
|
||||
return self.entity_description.value_fn(self._appliance, self.cavity)
|
||||
|
||||
@@ -120,87 +120,6 @@
|
||||
},
|
||||
"end_time": {
|
||||
"name": "End time"
|
||||
},
|
||||
"oven_state": {
|
||||
"name": "State",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"preheating": "Preheating",
|
||||
"cooking": "Cooking"
|
||||
}
|
||||
},
|
||||
"oven_state_upper": {
|
||||
"name": "Upper oven state",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"preheating": "[%key:component::whirlpool::entity::sensor::oven_state::state::preheating%]",
|
||||
"cooking": "[%key:component::whirlpool::entity::sensor::oven_state::state::cooking%]"
|
||||
}
|
||||
},
|
||||
"oven_state_lower": {
|
||||
"name": "Lower oven state",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"preheating": "[%key:component::whirlpool::entity::sensor::oven_state::state::preheating%]",
|
||||
"cooking": "[%key:component::whirlpool::entity::sensor::oven_state::state::cooking%]"
|
||||
}
|
||||
},
|
||||
"oven_cook_mode": {
|
||||
"name": "Cook mode",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"bake": "Bake",
|
||||
"convection_bake": "Convection bake",
|
||||
"broil": "Broil",
|
||||
"convection_broil": "Convection broil",
|
||||
"convection_roast": "Convection roast",
|
||||
"keep_warm": "Keep warm",
|
||||
"air_fry": "Air fry"
|
||||
}
|
||||
},
|
||||
"oven_cook_mode_upper": {
|
||||
"name": "Upper oven cook mode",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"bake": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::bake%]",
|
||||
"convection_bake": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_bake%]",
|
||||
"broil": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::broil%]",
|
||||
"convection_broil": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_broil%]",
|
||||
"convection_roast": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_roast%]",
|
||||
"keep_warm": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::keep_warm%]",
|
||||
"air_fry": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::air_fry%]"
|
||||
}
|
||||
},
|
||||
"oven_cook_mode_lower": {
|
||||
"name": "Lower oven cook mode",
|
||||
"state": {
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"bake": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::bake%]",
|
||||
"convection_bake": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_bake%]",
|
||||
"broil": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::broil%]",
|
||||
"convection_broil": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_broil%]",
|
||||
"convection_roast": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::convection_roast%]",
|
||||
"keep_warm": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::keep_warm%]",
|
||||
"air_fry": "[%key:component::whirlpool::entity::sensor::oven_cook_mode::state::air_fry%]"
|
||||
}
|
||||
},
|
||||
"oven_current_temperature": {
|
||||
"name": "Current temperature"
|
||||
},
|
||||
"oven_current_temperature_upper": {
|
||||
"name": "Upper oven current temperature"
|
||||
},
|
||||
"oven_current_temperature_lower": {
|
||||
"name": "Lower oven current temperature"
|
||||
},
|
||||
"oven_target_temperature": {
|
||||
"name": "Target temperature"
|
||||
},
|
||||
"oven_target_temperature_upper": {
|
||||
"name": "Upper oven target temperature"
|
||||
},
|
||||
"oven_target_temperature_lower": {
|
||||
"name": "Lower oven target temperature"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
homeassistant/generated/config_flows.py (generated)
@@ -304,7 +304,6 @@ FLOWS = {
|
||||
"immich",
|
||||
"improv_ble",
|
||||
"incomfort",
|
||||
"inels",
|
||||
"inkbird",
|
||||
"insteon",
|
||||
"intellifire",
|
||||
|
||||
@@ -3029,13 +3029,6 @@
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "opower"
|
||||
},
|
||||
"inels": {
|
||||
"name": "iNELS",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"single_config_entry": true
|
||||
},
|
||||
"influxdb": {
|
||||
"name": "InfluxDB",
|
||||
"integration_type": "hub",
|
||||
|
||||
homeassistant/generated/mqtt.py (generated)
@@ -16,9 +16,6 @@ MQTT = {
|
||||
"fully_kiosk": [
|
||||
"fully/deviceInfo/+",
|
||||
],
|
||||
"inels": [
|
||||
"inels/status/#",
|
||||
],
|
||||
"pglab": [
|
||||
"pglab/discovery/#",
|
||||
],
|
||||
|
||||
@@ -660,27 +660,19 @@ def _get_exposed_entities(
|
||||
|
||||
entity_entry = entity_registry.async_get(state.entity_id)
|
||||
names = [state.name]
|
||||
device_name = None
|
||||
area_names = []
|
||||
|
||||
if entity_entry is not None:
|
||||
names.extend(entity_entry.aliases)
|
||||
device = (
|
||||
device_registry.async_get(entity_entry.device_id)
|
||||
if entity_entry.device_id
|
||||
else None
|
||||
)
|
||||
|
||||
if device:
|
||||
device_name = device.name_by_user or device.name
|
||||
|
||||
if entity_entry.area_id and (
|
||||
area := area_registry.async_get_area(entity_entry.area_id)
|
||||
):
|
||||
# Entity is in area
|
||||
area_names.append(area.name)
|
||||
area_names.extend(area.aliases)
|
||||
elif device:
|
||||
elif entity_entry.device_id and (
|
||||
device := device_registry.async_get(entity_entry.device_id)
|
||||
):
|
||||
# Check device area
|
||||
if device.area_id and (
|
||||
area := area_registry.async_get_area(device.area_id)
|
||||
@@ -701,9 +693,6 @@ def _get_exposed_entities(
|
||||
if (parsed_utc := dt_util.parse_datetime(state.state)) is not None:
|
||||
info["state"] = dt_util.as_local(parsed_utc).isoformat()
|
||||
|
||||
if device_name and not state.name.lower().startswith(device_name.lower()):
|
||||
info["device"] = device_name
|
||||
|
||||
if area_names:
|
||||
info["areas"] = ", ".join(area_names)
|
||||
|
||||
|
||||
@@ -58,6 +58,7 @@ from . import (
|
||||
selector,
|
||||
target as target_helpers,
|
||||
template,
|
||||
translation,
|
||||
)
|
||||
from .deprecation import deprecated_class, deprecated_function, deprecated_hass_argument
|
||||
from .selector import TargetSelector
|
||||
@@ -585,6 +586,11 @@ async def async_get_all_descriptions(
|
||||
_load_services_files, integrations
|
||||
)
|
||||
|
||||
# Load translations for all service domains
|
||||
translations = await translation.async_get_translations(
|
||||
hass, "en", "services", services
|
||||
)
|
||||
|
||||
# Build response
|
||||
descriptions: dict[str, dict[str, Any]] = {}
|
||||
for domain, services_map in services.items():
|
||||
@@ -611,11 +617,40 @@ async def async_get_all_descriptions(
|
||||
|
||||
# Don't warn for missing services, because it triggers false
|
||||
# positives for things like scripts, that register as a service
|
||||
description = {"fields": yaml_description.get("fields", {})}
|
||||
#
|
||||
# When name & description are in the translations use those;
|
||||
# otherwise fallback to backwards compatible behavior from
|
||||
# the time when we didn't have translations for descriptions yet.
|
||||
# This mimics the behavior of the frontend.
|
||||
description = {
|
||||
"name": translations.get(
|
||||
f"component.{domain}.services.{service_name}.name",
|
||||
yaml_description.get("name", ""),
|
||||
),
|
||||
"description": translations.get(
|
||||
f"component.{domain}.services.{service_name}.description",
|
||||
yaml_description.get("description", ""),
|
||||
),
|
||||
"fields": dict(yaml_description.get("fields", {})),
|
||||
}
|
||||
|
||||
for item in ("description", "name", "target"):
|
||||
if item in yaml_description:
|
||||
description[item] = yaml_description[item]
|
||||
# Translate fields names & descriptions as well
|
||||
for field_name, field_schema in description["fields"].items():
|
||||
if name := translations.get(
|
||||
f"component.{domain}.services.{service_name}.fields.{field_name}.name"
|
||||
):
|
||||
field_schema["name"] = name
|
||||
if desc := translations.get(
|
||||
f"component.{domain}.services.{service_name}.fields.{field_name}.description"
|
||||
):
|
||||
field_schema["description"] = desc
|
||||
if example := translations.get(
|
||||
f"component.{domain}.services.{service_name}.fields.{field_name}.example"
|
||||
):
|
||||
field_schema["example"] = example
|
||||
|
||||
if "target" in yaml_description:
|
||||
description["target"] = yaml_description["target"]
|
||||
|
||||
response = service.supports_response
|
||||
if response is not SupportsResponse.NONE:
|
||||
|
||||
mypy.ini (generated)
@@ -2536,16 +2536,6 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.inels.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.input_button.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
requirements_all.txt (generated)
@@ -459,7 +459,7 @@ airgradient==0.9.2
|
||||
airly==1.1.0
|
||||
|
||||
# homeassistant.components.airos
|
||||
airos==0.6.0
|
||||
airos==0.5.6
|
||||
|
||||
# homeassistant.components.airthings_ble
|
||||
airthings-ble==1.1.1
|
||||
@@ -876,9 +876,6 @@ eliqonline==1.2.2
|
||||
# homeassistant.components.elkm1
|
||||
elkm1-lib==2.2.11
|
||||
|
||||
# homeassistant.components.inels
|
||||
elkoep-aio-mqtt==0.1.0b4
|
||||
|
||||
# homeassistant.components.elmax
|
||||
elmax-api==0.0.6.4rc0
|
||||
|
||||
@@ -1575,7 +1572,7 @@ notifications-android-tv==0.1.5
|
||||
notify-events==1.0.4
|
||||
|
||||
# homeassistant.components.nederlandse_spoorwegen
|
||||
nsapi==3.1.3
|
||||
nsapi==3.1.2
|
||||
|
||||
# homeassistant.components.nsw_fuel_station
|
||||
nsw-fuel-api-client==1.1.0
|
||||
|
||||
requirements_test_all.txt (generated)
@@ -441,7 +441,7 @@ airgradient==0.9.2
|
||||
airly==1.1.0
|
||||
|
||||
# homeassistant.components.airos
|
||||
airos==0.6.0
|
||||
airos==0.5.6
|
||||
|
||||
# homeassistant.components.airthings_ble
|
||||
airthings-ble==1.1.1
|
||||
@@ -764,9 +764,6 @@ elgato==5.1.2
|
||||
# homeassistant.components.elkm1
|
||||
elkm1-lib==2.2.11
|
||||
|
||||
# homeassistant.components.inels
|
||||
elkoep-aio-mqtt==0.1.0b4
|
||||
|
||||
# homeassistant.components.elmax
|
||||
elmax-api==0.0.6.4rc0
|
||||
|
||||
@@ -1352,7 +1349,7 @@ notifications-android-tv==0.1.5
|
||||
notify-events==1.0.4
|
||||
|
||||
# homeassistant.components.nederlandse_spoorwegen
|
||||
nsapi==3.1.3
|
||||
nsapi==3.1.2
|
||||
|
||||
# homeassistant.components.nsw_fuel_station
|
||||
nsw-fuel-api-client==1.1.0
|
||||
|
||||
@@ -29,20 +29,7 @@ ARG QEMU_CPU
|
||||
# Home Assistant S6-Overlay
|
||||
COPY rootfs /
|
||||
|
||||
# Needs to be redefined inside the FROM statement to be set for RUN commands
|
||||
ARG BUILD_ARCH
|
||||
# Get go2rtc binary
|
||||
RUN \
|
||||
case "${{BUILD_ARCH}}" in \
|
||||
"aarch64") go2rtc_suffix='arm64' ;; \
|
||||
"armhf") go2rtc_suffix='armv6' ;; \
|
||||
"armv7") go2rtc_suffix='arm' ;; \
|
||||
*) go2rtc_suffix=${{BUILD_ARCH}} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v{go2rtc}/go2rtc_linux_${{go2rtc_suffix}} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
&& go2rtc --version
|
||||
COPY --from=ghcr.io/alexxit/go2rtc:{go2rtc} /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv=={uv}
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
]),
|
||||
'derived': dict({
|
||||
'access_point': True,
|
||||
'fw_major': None,
|
||||
'mac': '**REDACTED**',
|
||||
'mac_interface': 'br0',
|
||||
'mode': 'point_to_point',
|
||||
|
||||
@@ -5,13 +5,18 @@
|
||||
'domain': 'group',
|
||||
'services': dict({
|
||||
'reload': dict({
|
||||
'description': 'Reloads group configuration, entities, and notify services from YAML-configuration.',
|
||||
'fields': dict({
|
||||
}),
|
||||
'name': 'Reload',
|
||||
}),
|
||||
'remove': dict({
|
||||
'description': 'Removes a group.',
|
||||
'fields': dict({
|
||||
'object_id': dict({
|
||||
'description': 'Object ID of this group. This object ID is used as part of the entity ID. Entity ID format: [domain].[object_id].',
|
||||
'example': 'test_group',
|
||||
'name': 'Object ID',
|
||||
'required': True,
|
||||
'selector': dict({
|
||||
'object': dict({
|
||||
@@ -20,11 +25,15 @@
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': 'Remove',
|
||||
}),
|
||||
'set': dict({
|
||||
'description': 'Creates/Updates a group.',
|
||||
'fields': dict({
|
||||
'add_entities': dict({
|
||||
'description': 'List of members to be added to the group. Cannot be used in combination with `Entities` or `Remove entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Add entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -33,13 +42,17 @@
|
||||
}),
|
||||
}),
|
||||
'all': dict({
|
||||
'description': 'Enable this option if the group should only be used when all entities are in state `on`.',
|
||||
'name': 'All',
|
||||
'selector': dict({
|
||||
'boolean': dict({
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'entities': dict({
|
||||
'description': 'List of all members in the group. Cannot be used in combination with `Add entities` or `Remove entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -48,14 +61,18 @@
|
||||
}),
|
||||
}),
|
||||
'icon': dict({
|
||||
'description': 'Name of the icon for the group.',
|
||||
'example': 'mdi:camera',
|
||||
'name': 'Icon',
|
||||
'selector': dict({
|
||||
'icon': dict({
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': dict({
|
||||
'description': 'Name of the group.',
|
||||
'example': 'My test group',
|
||||
'name': 'Name',
|
||||
'selector': dict({
|
||||
'text': dict({
|
||||
'multiline': False,
|
||||
@@ -64,7 +81,9 @@
|
||||
}),
|
||||
}),
|
||||
'object_id': dict({
|
||||
'description': 'Object ID of this group. This object ID is used as part of the entity ID. Entity ID format: [domain].[object_id].',
|
||||
'example': 'test_group',
|
||||
'name': 'Object ID',
|
||||
'required': True,
|
||||
'selector': dict({
|
||||
'text': dict({
|
||||
@@ -74,7 +93,9 @@
|
||||
}),
|
||||
}),
|
||||
'remove_entities': dict({
|
||||
'description': 'List of members to be removed from a group. Cannot be used in combination with `Entities` or `Add entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Remove entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -83,6 +104,7 @@
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': 'Set',
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
@@ -117,8 +139,10 @@
|
||||
'name': 'Translated name',
|
||||
}),
|
||||
'set_level': dict({
|
||||
'description': '',
|
||||
'fields': dict({
|
||||
}),
|
||||
'name': '',
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -375,6 +375,10 @@ async def test_api_get_services(
|
||||
"homeassistant.helpers.service._load_services_file",
|
||||
side_effect=_load_services_file,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.helpers.service.translation.async_get_translations",
|
||||
return_value={},
|
||||
),
|
||||
):
|
||||
resp = await mock_api_client.get(const.URL_API_SERVICES)
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
"""Tests for the iNELS integration."""
|
||||
|
||||
HA_INELS_PATH = "homeassistant.components.inels"
|
||||
@@ -1,133 +0,0 @@
|
||||
"""Common methods used across tests."""
|
||||
|
||||
from homeassistant.components import inels
|
||||
from homeassistant.components.inels.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
__all__ = [
|
||||
"MockConfigEntry",
|
||||
"get_entity_id",
|
||||
"get_entity_state",
|
||||
"inels",
|
||||
"old_entity_and_device_removal",
|
||||
"set_mock_mqtt",
|
||||
]
|
||||
|
||||
MAC_ADDRESS = "001122334455"
|
||||
UNIQUE_ID = "C0FFEE"
|
||||
CONNECTED_INELS_VALUE = b"on\n"
|
||||
DISCONNECTED_INELS_VALUE = b"off\n"
|
||||
|
||||
|
||||
def get_entity_id(entity_config: dict, index: int) -> str:
|
||||
"""Construct the entity_id based on the entity_config."""
|
||||
unique_id = entity_config["unique_id"].lower()
|
||||
base_id = f"{entity_config['entity_type']}.{MAC_ADDRESS}_{unique_id}_{entity_config['device_type']}"
|
||||
return f"{base_id}{f'_{index:03}'}" if index is not None else base_id
|
||||
|
||||
|
||||
def get_entity_state(
|
||||
hass: HomeAssistant, entity_config: dict, index: int
|
||||
) -> State | None:
|
||||
"""Return the state of the entity from the state machine."""
|
||||
entity_id = get_entity_id(entity_config, index)
|
||||
return hass.states.get(entity_id)
|
||||
|
||||
|
||||
def set_mock_mqtt(
|
||||
mqtt,
|
||||
config: dict,
|
||||
status_value: bytes,
|
||||
device_available: bool = True,
|
||||
gw_available: bool = True,
|
||||
last_value=None,
|
||||
):
|
||||
"""Set mock mqtt communication."""
|
||||
gw_connected_value = '{"status":true}' if gw_available else '{"status":false}'
|
||||
device_connected_value = (
|
||||
CONNECTED_INELS_VALUE if device_available else DISCONNECTED_INELS_VALUE
|
||||
)
|
||||
|
||||
mqtt.mock_messages = {
|
||||
config["gw_connected_topic"]: gw_connected_value,
|
||||
config["connected_topic"]: device_connected_value,
|
||||
config["status_topic"]: status_value,
|
||||
}
|
||||
mqtt.mock_discovery_all = {config["base_topic"]: status_value}
|
||||
|
||||
if last_value is not None:
|
||||
mqtt.mock_last_value = {config["status_topic"]: last_value}
|
||||
else:
|
||||
mqtt.mock_last_value = {}
|
||||
|
||||
|
||||
async def old_entity_and_device_removal(
|
||||
hass: HomeAssistant, mock_mqtt, platform, entity_config, value_key, index
|
||||
):
|
||||
"""Test that old entities are correctly identified and removed across different platforms."""
|
||||
|
||||
set_mock_mqtt(
|
||||
mock_mqtt,
|
||||
config=entity_config,
|
||||
status_value=entity_config[value_key],
|
||||
gw_available=True,
|
||||
device_available=True,
|
||||
)
|
||||
|
||||
config_entry = MockConfigEntry(
|
||||
data={},
|
||||
domain=DOMAIN,
|
||||
title="iNELS",
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
# Create an old entity
|
||||
entity_registry = er.async_get(hass)
|
||||
old_entity = entity_registry.async_get_or_create(
|
||||
domain=platform,
|
||||
platform=DOMAIN,
|
||||
unique_id=f"old_{entity_config['unique_id']}",
|
||||
suggested_object_id=f"old_inels_{platform}_{entity_config['device_type']}",
|
||||
config_entry=config_entry,
|
||||
)
|
||||
|
||||
# Create a device and associate it with the old entity
|
||||
device_registry = dr.async_get(hass)
|
||||
device = device_registry.async_get_or_create(
|
||||
config_entry_id=config_entry.entry_id,
|
||||
identifiers={(DOMAIN, f"old_{entity_config['unique_id']}")},
|
||||
name=f"iNELS {platform.capitalize()} {entity_config['device_type']}",
|
||||
manufacturer="iNELS",
|
||||
model=entity_config["device_type"],
|
||||
)
|
||||
|
||||
# Associate the old entity with the device
|
||||
entity_registry.async_update_entity(old_entity.entity_id, device_id=device.id)
|
||||
|
||||
assert (
|
||||
device_registry.async_get_device({(DOMAIN, old_entity.unique_id)}) is not None
|
||||
)
|
||||
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# The device was discovered, and at this point, the async_remove_old_entities function was called
|
||||
assert config_entry.runtime_data.devices
|
||||
assert old_entity.entity_id not in config_entry.runtime_data.old_entities[platform]
|
||||
|
||||
# Get the new entity
|
||||
new_entity = entity_registry.async_get(get_entity_id(entity_config, index).lower())
|
||||
|
||||
assert new_entity is not None
|
||||
|
||||
# Verify that the new entity is in the registry
|
||||
assert entity_registry.async_get(new_entity.entity_id) is not None
|
||||
|
||||
# Verify that the old entity is no longer in the registry
|
||||
assert entity_registry.async_get(old_entity.entity_id) is None
|
||||
|
||||
# Verify that the device no longer exists in the registry
|
||||
assert device_registry.async_get_device({(DOMAIN, old_entity.unique_id)}) is None
|
||||
@@ -1,119 +0,0 @@
|
||||
"""Test fixtures."""
|
||||
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import HA_INELS_PATH
|
||||
from .common import DOMAIN, MockConfigEntry, get_entity_state, set_mock_mqtt
|
||||
|
||||
|
||||
@pytest.fixture(name="mock_mqtt")
|
||||
def mock_inelsmqtt_fixture():
|
||||
"""Mock inels mqtt lib."""
|
||||
|
||||
def messages():
|
||||
"""Return mocked messages."""
|
||||
return mqtt.mock_messages
|
||||
|
||||
def last_value(topic):
|
||||
"""Mock last_value to return None if mock_last_value is empty or topic doesn't exist."""
|
||||
return mqtt.mock_last_value.get(topic) if mqtt.mock_last_value else None
|
||||
|
||||
async def discovery_all():
|
||||
"""Return mocked discovered devices."""
|
||||
return mqtt.mock_discovery_all
|
||||
|
||||
async def subscribe(topic, qos=0, options=None, properties=None):
|
||||
"""Mock subscribe fnc."""
|
||||
if isinstance(topic, list):
|
||||
return {t: mqtt.mock_messages.get(t) for t in topic}
|
||||
return mqtt.mock_messages.get(topic)
|
||||
|
||||
async def publish(topic, payload, qos=0, retain=True, properties=None):
|
||||
"""Mock publish to change value of the device."""
|
||||
mqtt.mock_messages[topic] = payload
|
||||
|
||||
unsubscribe_topics = AsyncMock()
|
||||
unsubscribe_listeners = Mock()
|
||||
|
||||
mqtt = Mock(
|
||||
messages=messages,
|
||||
subscribe=subscribe,
|
||||
publish=publish,
|
||||
last_value=last_value,
|
||||
discovery_all=discovery_all,
|
||||
unsubscribe_topics=unsubscribe_topics,
|
||||
unsubscribe_listeners=unsubscribe_listeners,
|
||||
mock_last_value=dict[str, Any](),
|
||||
mock_messages=dict[str, Any](),
|
||||
mock_discovery_all=dict[str, Any](),
|
||||
)
|
||||
|
||||
with (
|
||||
patch(f"{HA_INELS_PATH}.InelsMqtt", return_value=mqtt),
|
||||
):
|
||||
yield mqtt
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_reload_entry():
|
||||
"""Mock the async_reload_entry function."""
|
||||
with patch(f"{HA_INELS_PATH}.async_reload_entry") as mock_reload:
|
||||
yield mock_reload
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def setup_entity(hass: HomeAssistant, mock_mqtt):
|
||||
"""Set up an entity for testing with specified configuration and status."""
|
||||
|
||||
async def _setup(
|
||||
entity_config,
|
||||
status_value: bytes,
|
||||
device_available: bool = True,
|
||||
gw_available: bool = True,
|
||||
last_value=None,
|
||||
index: int | None = None,
|
||||
):
|
||||
set_mock_mqtt(
|
||||
mock_mqtt,
|
||||
config=entity_config,
|
||||
status_value=status_value,
|
||||
gw_available=gw_available,
|
||||
device_available=device_available,
|
||||
last_value=last_value,
|
||||
)
|
||||
await setup_inels_test_integration(hass)
|
||||
return get_entity_state(hass, entity_config, index)
|
||||
|
||||
return _setup
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def entity_config(request: pytest.FixtureRequest):
|
||||
"""Fixture to provide parameterized entity configuration."""
|
||||
# This fixture will be parameterized in each test file
|
||||
return request.param
|
||||
|
||||
|
||||
async def setup_inels_test_integration(hass: HomeAssistant):
|
||||
"""Load inels integration with mocked mqtt broker."""
|
||||
hass.config.components.add(DOMAIN)
|
||||
|
||||
entry = MockConfigEntry(
|
||||
data={},
|
||||
domain=DOMAIN,
|
||||
title="iNELS",
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
with (
|
||||
patch(f"{HA_INELS_PATH}.ha_mqtt.async_wait_for_mqtt_client", return_value=True),
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert DOMAIN in hass.config.components
|
||||
@@ -1,170 +0,0 @@
|
||||
"""Test the iNELS config flow."""
|
||||
|
||||
from homeassistant.components.inels.const import DOMAIN, TITLE
|
||||
from homeassistant.components.mqtt import MQTT_CONNECTION_STATE
|
||||
from homeassistant.config_entries import SOURCE_MQTT, SOURCE_USER
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service_info.mqtt import MqttServiceInfo
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.typing import MqttMockHAClient
|
||||
|
||||
|
||||
async def test_mqtt_config_single_instance(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""The MQTT test flow is aborted if an entry already exists."""
|
||||
|
||||
MockConfigEntry(domain=DOMAIN).add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
|
||||
|
||||
async def test_mqtt_setup(hass: HomeAssistant, mqtt_mock: MqttMockHAClient) -> None:
|
||||
"""When an MQTT message is received on the discovery topic, it triggers a config flow."""
|
||||
discovery_info = MqttServiceInfo(
|
||||
topic="inels/status/MAC_ADDRESS/gw",
|
||||
payload='{"CUType":"CU3-08M","Status":"Runfast","FW":"02.97.18"}',
|
||||
qos=0,
|
||||
retain=False,
|
||||
subscribed_topic="inels/status/#",
|
||||
timestamp=None,
|
||||
)
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}, data=discovery_info
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
|
||||
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TITLE
|
||||
assert result["result"].data == {}
|
||||
|
||||
|
||||
async def test_mqtt_abort_invalid_topic(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Check MQTT flow aborts if discovery topic is invalid."""
|
||||
discovery_info = MqttServiceInfo(
|
||||
topic="inels/status/MAC_ADDRESS/wrong_topic",
|
||||
payload='{"CUType":"CU3-08M","Status":"Runfast","FW":"02.97.18"}',
|
||||
qos=0,
|
||||
retain=False,
|
||||
subscribed_topic="inels/status/#",
|
||||
timestamp=None,
|
||||
)
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}, data=discovery_info
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "invalid_discovery_info"
|
||||
|
||||
|
||||
async def test_mqtt_abort_empty_payload(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Check MQTT flow aborts if discovery payload is empty."""
|
||||
discovery_info = MqttServiceInfo(
|
||||
topic="inels/status/MAC_ADDRESS/gw",
|
||||
payload="",
|
||||
qos=0,
|
||||
retain=False,
|
||||
subscribed_topic="inels/status/#",
|
||||
timestamp=None,
|
||||
)
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}, data=discovery_info
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "invalid_discovery_info"
|
||||
|
||||
|
||||
async def test_mqtt_abort_already_in_progress(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Test that a second MQTT flow is aborted when one is already in progress."""
|
||||
discovery_info = MqttServiceInfo(
|
||||
topic="inels/status/MAC_ADDRESS/gw",
|
||||
payload='{"CUType":"CU3-08M","Status":"Runfast","FW":"02.97.18"}',
|
||||
qos=0,
|
||||
retain=False,
|
||||
subscribed_topic="inels/status/#",
|
||||
timestamp=None,
|
||||
)
|
||||
|
||||
# Start first MQTT flow
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}, data=discovery_info
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
# Try to start second MQTT flow while first is in progress
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_MQTT}, data=discovery_info
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "already_in_progress"
|
||||
|
||||
|
||||
async def test_user_setup(hass: HomeAssistant, mqtt_mock: MqttMockHAClient) -> None:
|
||||
"""Test if the user can finish a config flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TITLE
|
||||
assert result["result"].data == {}
|
||||
|
||||
|
||||
async def test_user_config_single_instance(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""The user test flow is aborted if an entry already exists."""
|
||||
MockConfigEntry(domain=DOMAIN).add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
|
||||
|
||||
async def test_user_setup_mqtt_not_connected(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""The user setup test flow is aborted when MQTT is not connected."""
|
||||
|
||||
mqtt_mock.connected = False
|
||||
async_dispatcher_send(hass, MQTT_CONNECTION_STATE, False)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "mqtt_not_connected"
|
||||
|
||||
|
||||
async def test_user_setup_mqtt_not_configured(hass: HomeAssistant) -> None:
|
||||
"""The user setup test flow is aborted when MQTT is not configured."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "mqtt_not_configured"
|
||||
@@ -1,102 +0,0 @@
|
||||
"""Tests for iNELS integration."""
|
||||
|
||||
from unittest.mock import ANY, AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from . import HA_INELS_PATH
|
||||
from .common import DOMAIN, inels
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.typing import MqttMockHAClient
|
||||
|
||||
|
||||
async def test_ha_mqtt_publish(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Test that MQTT publish function works correctly."""
|
||||
config_entry = MockConfigEntry(domain=DOMAIN, data={})
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
with (
|
||||
patch(f"{HA_INELS_PATH}.InelsDiscovery") as mock_discovery_class,
|
||||
patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setups",
|
||||
return_value=None,
|
||||
),
|
||||
):
|
||||
mock_discovery = AsyncMock()
|
||||
mock_discovery.start.return_value = []
|
||||
mock_discovery_class.return_value = mock_discovery
|
||||
|
||||
await inels.async_setup_entry(hass, config_entry)
|
||||
|
||||
topic, payload, qos, retain = "test/topic", "test_payload", 1, True
|
||||
|
||||
await config_entry.runtime_data.mqtt.publish(topic, payload, qos, retain)
|
||||
mqtt_mock.async_publish.assert_called_once_with(topic, payload, qos, retain)
|
||||
|
||||
|
||||
async def test_ha_mqtt_subscribe(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Test that MQTT subscribe function works correctly."""
|
||||
config_entry = MockConfigEntry(domain=DOMAIN, data={})
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
with (
|
||||
patch(f"{HA_INELS_PATH}.InelsDiscovery") as mock_discovery_class,
|
||||
patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setups",
|
||||
return_value=None,
|
||||
),
|
||||
):
|
||||
mock_discovery = AsyncMock()
|
||||
mock_discovery.start.return_value = []
|
||||
mock_discovery_class.return_value = mock_discovery
|
||||
|
||||
await inels.async_setup_entry(hass, config_entry)
|
||||
|
||||
topic = "test/topic"
|
||||
|
||||
await config_entry.runtime_data.mqtt.subscribe(topic)
|
||||
mqtt_mock.async_subscribe.assert_any_call(topic, ANY, 0, "utf-8", None)
|
||||
|
||||
|
||||
async def test_ha_mqtt_not_available(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient
|
||||
) -> None:
|
||||
"""Test that ConfigEntryNotReady is raised when MQTT is not available."""
|
||||
config_entry = MockConfigEntry(domain=DOMAIN, data={})
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.mqtt.async_wait_for_mqtt_client",
|
||||
return_value=False,
|
||||
),
|
||||
pytest.raises(ConfigEntryNotReady, match="MQTT integration not available"),
|
||||
):
|
||||
await inels.async_setup_entry(hass, config_entry)
|
||||
|
||||
|
||||
async def test_unload_entry(hass: HomeAssistant, mock_mqtt) -> None:
|
||||
"""Test unload entry."""
|
||||
config_entry = MockConfigEntry(domain=DOMAIN, data={})
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
config_entry.runtime_data = inels.InelsData(mqtt=mock_mqtt, devices=[])
|
||||
|
||||
with patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_unload_platforms",
|
||||
return_value=True,
|
||||
) as mock_unload_platforms:
|
||||
result = await inels.async_unload_entry(hass, config_entry)
|
||||
|
||||
assert result is True
|
||||
mock_mqtt.unsubscribe_topics.assert_called_once()
|
||||
mock_mqtt.unsubscribe_listeners.assert_called_once()
|
||||
mock_unload_platforms.assert_called_once()
|
||||
@@ -1,167 +0,0 @@
|
||||
"""iNELS switch platform testing."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
DOMAIN as SWITCH_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
)
|
||||
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .common import MAC_ADDRESS, UNIQUE_ID, get_entity_id
|
||||
|
||||
DT_07 = "07"
|
||||
DT_100 = "100"
|
||||
DT_BITS = "bits"
|
||||
|
||||
|
||||
@pytest.fixture(params=["simple_relay", "relay", "bit"])
|
||||
def entity_config(request: pytest.FixtureRequest):
|
||||
"""Fixture to provide parameterized entity configuration for switch tests."""
|
||||
configs = {
|
||||
"simple_relay": {
|
||||
"entity_type": "switch",
|
||||
"device_type": "simple_relay",
|
||||
"dev_type": DT_07,
|
||||
"unique_id": UNIQUE_ID,
|
||||
"gw_connected_topic": f"inels/connected/{MAC_ADDRESS}/gw",
|
||||
"connected_topic": f"inels/connected/{MAC_ADDRESS}/{DT_07}/{UNIQUE_ID}",
|
||||
"status_topic": f"inels/status/{MAC_ADDRESS}/{DT_07}/{UNIQUE_ID}",
|
||||
"base_topic": f"{MAC_ADDRESS}/{DT_07}/{UNIQUE_ID}",
|
||||
"switch_on_value": "07\n01\n92\n09\n",
|
||||
"switch_off_value": "07\n00\n92\n09\n",
|
||||
},
|
||||
"relay": {
|
||||
"entity_type": "switch",
|
||||
"device_type": "relay",
|
||||
"dev_type": DT_100,
|
||||
"unique_id": UNIQUE_ID,
|
||||
"gw_connected_topic": f"inels/connected/{MAC_ADDRESS}/gw",
|
||||
"connected_topic": f"inels/connected/{MAC_ADDRESS}/{DT_100}/{UNIQUE_ID}",
|
||||
"status_topic": f"inels/status/{MAC_ADDRESS}/{DT_100}/{UNIQUE_ID}",
|
||||
"base_topic": f"{MAC_ADDRESS}/{DT_100}/{UNIQUE_ID}",
|
||||
"switch_on_value": "07\n00\n0A\n28\n00\n",
|
||||
"switch_off_value": "06\n00\n0A\n28\n00\n",
|
||||
"alerts": {
|
||||
"overflow": "06\n00\n0A\n28\n01\n",
|
||||
},
|
||||
},
|
||||
"bit": {
|
||||
"entity_type": "switch",
|
||||
"device_type": "bit",
|
||||
"dev_type": DT_BITS,
|
||||
"unique_id": UNIQUE_ID,
|
||||
"gw_connected_topic": f"inels/connected/{MAC_ADDRESS}/gw",
|
||||
"connected_topic": f"inels/connected/{MAC_ADDRESS}/{DT_BITS}/{UNIQUE_ID}",
|
||||
"status_topic": f"inels/status/{MAC_ADDRESS}/{DT_BITS}/{UNIQUE_ID}",
|
||||
"base_topic": f"{MAC_ADDRESS}/{DT_BITS}/{UNIQUE_ID}",
|
||||
"switch_on_value": b'{"state":{"000":1,"001":1}}',
|
||||
"switch_off_value": b'{"state":{"000":0,"001":0}}',
|
||||
},
|
||||
}
|
||||
return configs[request.param]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"entity_config", ["simple_relay", "relay", "bit"], indirect=True
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("gw_available", "device_available", "expected_state"),
|
||||
[
|
||||
(True, False, STATE_UNAVAILABLE),
|
||||
(False, True, STATE_UNAVAILABLE),
|
||||
(True, True, STATE_ON),
|
||||
],
|
||||
)
|
||||
async def test_switch_availability(
|
||||
hass: HomeAssistant,
|
||||
setup_entity,
|
||||
entity_config,
|
||||
gw_available,
|
||||
device_available,
|
||||
expected_state,
|
||||
) -> None:
|
||||
"""Test switch availability and state under different gateway and device availability conditions."""
|
||||
|
||||
switch_state = await setup_entity(
|
||||
entity_config,
|
||||
status_value=entity_config["switch_on_value"],
|
||||
gw_available=gw_available,
|
||||
device_available=device_available,
|
||||
index=0 if entity_config["device_type"] == "bit" else None,
|
||||
)
|
||||
|
||||
assert switch_state is not None
|
||||
assert switch_state.state == expected_state
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("entity_config", "index"),
|
||||
[
|
||||
("simple_relay", None),
|
||||
("relay", None),
|
||||
("bit", 0),
|
||||
],
|
||||
indirect=["entity_config"],
|
||||
)
|
||||
async def test_switch_turn_on(
|
||||
hass: HomeAssistant, setup_entity, entity_config, index
|
||||
) -> None:
|
||||
"""Test turning on a switch."""
|
||||
switch_state = await setup_entity(
|
||||
entity_config, status_value=entity_config["switch_off_value"], index=index
|
||||
)
|
||||
|
||||
assert switch_state is not None
|
||||
assert switch_state.state == STATE_OFF
|
||||
|
||||
with patch("inelsmqtt.devices.Device.set_ha_value") as mock_set_state:
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_ON,
|
||||
{ATTR_ENTITY_ID: get_entity_id(entity_config, index)},
|
||||
blocking=True,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
mock_set_state.assert_called_once()
|
||||
|
||||
ha_value = mock_set_state.call_args.args[0]
|
||||
assert getattr(ha_value, entity_config["device_type"])[0].is_on is True
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("entity_config", "index"),
|
||||
[
|
||||
("simple_relay", None),
|
||||
("relay", None),
|
||||
("bit", 0),
|
||||
],
|
||||
indirect=["entity_config"],
|
||||
)
|
||||
async def test_switch_turn_off(
|
||||
hass: HomeAssistant, setup_entity, entity_config, index
|
||||
) -> None:
|
||||
"""Test turning off a switch."""
|
||||
switch_state = await setup_entity(
|
||||
entity_config, status_value=entity_config["switch_on_value"], index=index
|
||||
)
|
||||
|
||||
assert switch_state is not None
|
||||
assert switch_state.state == STATE_ON
|
||||
|
||||
with patch("inelsmqtt.devices.Device.set_ha_value") as mock_set_state:
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
{ATTR_ENTITY_ID: get_entity_id(entity_config, index)},
|
||||
blocking=True,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
mock_set_state.assert_called_once()
|
||||
|
||||
ha_value = mock_set_state.call_args.args[0]
|
||||
assert getattr(ha_value, entity_config["device_type"])[0].is_on is False
|
||||
@@ -59,31 +59,29 @@ def mock_config_entry() -> MockConfigEntry:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_nextdns() -> Generator[AsyncMock]:
|
||||
"""Mock the NextDns class."""
|
||||
def mock_nextdns_client() -> Generator[AsyncMock]:
|
||||
"""Mock a NextDNS client."""
|
||||
|
||||
with (
|
||||
patch("homeassistant.components.nextdns.NextDns", autospec=True) as mock_class,
|
||||
patch("homeassistant.components.nextdns.config_flow.NextDns", new=mock_class),
|
||||
patch("homeassistant.components.nextdns.NextDns", autospec=True) as mock_client,
|
||||
patch(
|
||||
"homeassistant.components.nextdns.config_flow.NextDns",
|
||||
new=mock_client,
|
||||
),
|
||||
):
|
||||
yield mock_class
|
||||
client = mock_client.create.return_value
|
||||
client.clear_logs.return_value = True
|
||||
client.connection_status.return_value = CONNECTION_STATUS
|
||||
client.get_analytics_dnssec.return_value = ANALYTICS_DNSSEC
|
||||
client.get_analytics_encryption.return_value = ANALYTICS_ENCRYPTION
|
||||
client.get_analytics_ip_versions.return_value = ANALYTICS_IP_VERSIONS
|
||||
client.get_analytics_protocols.return_value = ANALYTICS_PROTOCOLS
|
||||
client.get_analytics_status.return_value = ANALYTICS_STATUS
|
||||
client.get_profile_id = Mock(return_value="xyz12")
|
||||
client.get_profile_name = Mock(return_value="Fake Profile")
|
||||
client.get_profiles.return_value = PROFILES
|
||||
client.get_settings.return_value = SETTINGS
|
||||
client.set_setting.return_value = True
|
||||
client.profiles = [ProfileInfo(**PROFILES[0])]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_nextdns_client(mock_nextdns: AsyncMock) -> AsyncMock:
|
||||
"""Mock a NextDNS client instance."""
|
||||
client = mock_nextdns.create.return_value
|
||||
client.clear_logs.return_value = True
|
||||
client.connection_status.return_value = CONNECTION_STATUS
|
||||
client.get_analytics_dnssec.return_value = ANALYTICS_DNSSEC
|
||||
client.get_analytics_encryption.return_value = ANALYTICS_ENCRYPTION
|
||||
client.get_analytics_ip_versions.return_value = ANALYTICS_IP_VERSIONS
|
||||
client.get_analytics_protocols.return_value = ANALYTICS_PROTOCOLS
|
||||
client.get_analytics_status.return_value = ANALYTICS_STATUS
|
||||
client.get_profile_id = Mock(return_value="xyz12")
|
||||
client.get_profile_name = Mock(return_value="Fake Profile")
|
||||
client.get_profiles.return_value = PROFILES
|
||||
client.get_settings.return_value = SETTINGS
|
||||
client.set_setting.return_value = True
|
||||
client.profiles = [ProfileInfo(**PROFILES[0])]
|
||||
|
||||
return client
|
||||
yield client
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Define tests for the NextDNS config flow."""
|
||||
|
||||
from unittest.mock import AsyncMock
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from nextdns import ApiError, InvalidApiKeyError
|
||||
import pytest
|
||||
@@ -21,7 +21,6 @@ async def test_form_create_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
) -> None:
|
||||
"""Test that the user step works."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
@@ -65,7 +64,6 @@ async def test_form_errors(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
exc: Exception,
|
||||
base_error: str,
|
||||
) -> None:
|
||||
@@ -76,18 +74,18 @@ async def test_form_errors(
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
mock_nextdns.create.side_effect = exc
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{CONF_API_KEY: "fake_api_key"},
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.nextdns.NextDns.create",
|
||||
side_effect=exc,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{CONF_API_KEY: "fake_api_key"},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": base_error}
|
||||
|
||||
mock_nextdns.create.side_effect = None
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{CONF_API_KEY: "fake_api_key"},
|
||||
@@ -112,7 +110,6 @@ async def test_form_already_configured(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
) -> None:
|
||||
"""Test that errors are shown when duplicates are added."""
|
||||
await init_integration(hass, mock_config_entry)
|
||||
@@ -138,7 +135,6 @@ async def test_reauth_successful(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
) -> None:
|
||||
"""Test starting a reauthentication flow."""
|
||||
await init_integration(hass, mock_config_entry)
|
||||
@@ -172,7 +168,6 @@ async def test_reauth_errors(
|
||||
base_error: str,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
) -> None:
|
||||
"""Test reauthentication flow with errors."""
|
||||
await init_integration(hass, mock_config_entry)
|
||||
@@ -181,17 +176,14 @@ async def test_reauth_errors(
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "reauth_confirm"
|
||||
|
||||
mock_nextdns.create.side_effect = exc
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={CONF_API_KEY: "new_api_key"},
|
||||
)
|
||||
with patch("homeassistant.components.nextdns.NextDns.create", side_effect=exc):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={CONF_API_KEY: "new_api_key"},
|
||||
)
|
||||
|
||||
assert result["errors"] == {"base": base_error}
|
||||
|
||||
mock_nextdns.create.side_effect = None
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={CONF_API_KEY: "new_api_key"},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Test init of NextDNS integration."""
|
||||
|
||||
from unittest.mock import AsyncMock
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from nextdns import ApiError, InvalidApiKeyError
|
||||
import pytest
|
||||
@@ -36,13 +36,15 @@ async def test_async_setup_entry(
|
||||
async def test_config_not_ready(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns: AsyncMock,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
exc: Exception,
|
||||
) -> None:
|
||||
"""Test for setup failure if the connection to the service fails."""
|
||||
mock_nextdns.create.side_effect = exc
|
||||
|
||||
await init_integration(hass, mock_config_entry)
|
||||
with patch(
|
||||
"homeassistant.components.nextdns.NextDns.create",
|
||||
side_effect=exc,
|
||||
):
|
||||
await init_integration(hass, mock_config_entry)
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
|
||||
|
||||
@@ -51,7 +53,6 @@ async def test_unload_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
mock_nextdns: AsyncMock,
|
||||
) -> None:
|
||||
"""Test successful unload of entry."""
|
||||
await init_integration(hass, mock_config_entry)
|
||||
@@ -69,12 +70,14 @@ async def test_unload_entry(
|
||||
async def test_config_auth_failed(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_nextdns: AsyncMock,
|
||||
mock_nextdns_client: AsyncMock,
|
||||
) -> None:
|
||||
"""Test for setup failure if the auth fails."""
|
||||
mock_nextdns.create.side_effect = InvalidApiKeyError
|
||||
|
||||
await init_integration(hass, mock_config_entry)
|
||||
with patch(
|
||||
"homeassistant.components.nextdns.NextDns.create",
|
||||
side_effect=InvalidApiKeyError,
|
||||
):
|
||||
await init_integration(hass, mock_config_entry)
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
|
||||
|
||||
|
||||
@@ -34,10 +34,8 @@
|
||||
]),
|
||||
}),
|
||||
'entry_data': dict({
|
||||
'filters': dict({
|
||||
'area_filter': '.*',
|
||||
'headline_filter': '.*corona.*',
|
||||
}),
|
||||
'area_filter': '.*',
|
||||
'headline_filter': '.*corona.*',
|
||||
'regions': dict({
|
||||
'083350000000': 'Aach, Stadt',
|
||||
}),
|
||||
|
||||
@@ -31,29 +31,21 @@ from tests.common import MockConfigEntry

ENTRY_DATA: dict[str, Any] = {
"slots": 5,
"corona_filter": True,
"regions": {"083350000000": "Aach, Stadt"},
"filters": {
"headline_filter": ".*corona.*",
"area_filter": ".*",
},
}

ENTRY_DATA_NO_CORONA: dict[str, Any] = {
"slots": 5,
"corona_filter": False,
"regions": {"083350000000": "Aach, Stadt"},
"filters": {
"headline_filter": "/(?!)/",
"area_filter": ".*",
},
}

ENTRY_DATA_NO_AREA: dict[str, Any] = {
"slots": 5,
"corona_filter": False,
"area_filter": ".*nagold.*",
"regions": {"083350000000": "Aach, Stadt"},
"filters": {
"headline_filter": "/(?!)/",
"area_filter": ".*nagold.*",
},
}


@@ -65,7 +57,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry)
wraps=mocked_request_function,
):
conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=ENTRY_DATA, version=1, minor_version=3
domain=DOMAIN, title="NINA", data=ENTRY_DATA
)
conf_entry.add_to_hass(hass)

@@ -186,11 +178,7 @@ async def test_sensors_without_corona_filter(
wraps=mocked_request_function,
):
conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN,
title="NINA",
data=ENTRY_DATA_NO_CORONA,
version=1,
minor_version=3,
domain=DOMAIN, title="NINA", data=ENTRY_DATA_NO_CORONA
)
conf_entry.add_to_hass(hass)

@@ -323,11 +311,7 @@ async def test_sensors_with_area_filter(
wraps=mocked_request_function,
):
conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN,
title="NINA",
data=ENTRY_DATA_NO_AREA,
version=1,
minor_version=3,
domain=DOMAIN, title="NINA", data=ENTRY_DATA_NO_AREA
)
conf_entry.add_to_hass(hass)

@@ -11,7 +11,6 @@ from pynina import ApiError

from homeassistant.components.nina.const import (
CONF_AREA_FILTER,
CONF_FILTERS,
CONF_HEADLINE_FILTER,
CONF_MESSAGE_SLOTS,
CONF_REGIONS,
@@ -40,10 +39,8 @@ DUMMY_DATA: dict[str, Any] = {
CONST_REGION_M_TO_Q: ["071380000000_0", "071380000000_1"],
CONST_REGION_R_TO_U: ["072320000000_0", "072320000000_1"],
CONST_REGION_V_TO_Z: ["081270000000_0", "081270000000_1"],
CONF_FILTERS: {
CONF_HEADLINE_FILTER: ".*corona.*",
CONF_AREA_FILTER: ".*",
},
CONF_HEADLINE_FILTER: ".*corona.*",
CONF_AREA_FILTER: ".*",
}

DUMMY_RESPONSE_REGIONS: dict[str, Any] = json.loads(
@@ -115,13 +112,6 @@ async def test_step_user(hass: HomeAssistant) -> None:

assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "NINA"
assert result["data"] == deepcopy(DUMMY_DATA) | {
CONF_REGIONS: {
"095760000000": "Allersberg, M (Roth - Bayern) + Büchenbach (Roth - Bayern)"
}
}
assert result["version"] == 1
assert result["minor_version"] == 3


async def test_step_user_no_selection(hass: HomeAssistant) -> None:
@@ -131,9 +121,7 @@ async def test_step_user_no_selection(hass: HomeAssistant) -> None:
wraps=mocked_request_function,
):
result: dict[str, Any] = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_FILTERS: {CONF_HEADLINE_FILTER: ""}},
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HEADLINE_FILTER: ""}
)

assert result["type"] is FlowResultType.FORM
@@ -147,7 +135,7 @@ async def test_step_user_already_configured(hass: HomeAssistant) -> None:
"pynina.baseApi.BaseAPI._makeRequest",
wraps=mocked_request_function,
):
await hass.config_entries.flow.async_init(
result: dict[str, Any] = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=deepcopy(DUMMY_DATA)
)

@@ -165,13 +153,12 @@ async def test_options_flow_init(hass: HomeAssistant) -> None:
domain=DOMAIN,
title="NINA",
data={
CONF_FILTERS: deepcopy(DUMMY_DATA[CONF_FILTERS]),
CONF_HEADLINE_FILTER: deepcopy(DUMMY_DATA[CONF_HEADLINE_FILTER]),
CONF_AREA_FILTER: deepcopy(DUMMY_DATA[CONF_AREA_FILTER]),
CONF_MESSAGE_SLOTS: deepcopy(DUMMY_DATA[CONF_MESSAGE_SLOTS]),
CONST_REGION_A_TO_D: deepcopy(DUMMY_DATA[CONST_REGION_A_TO_D]),
CONF_REGIONS: {"095760000000": "Aach"},
},
version=1,
minor_version=3,
)
config_entry.add_to_hass(hass)

@@ -199,7 +186,6 @@ async def test_options_flow_init(hass: HomeAssistant) -> None:
CONST_REGION_M_TO_Q: [],
CONST_REGION_R_TO_U: [],
CONST_REGION_V_TO_Z: [],
CONF_FILTERS: {},
},
)

@@ -207,7 +193,8 @@ async def test_options_flow_init(hass: HomeAssistant) -> None:
assert result["data"] == {}

assert dict(config_entry.data) == {
CONF_FILTERS: deepcopy(DUMMY_DATA[CONF_FILTERS]),
CONF_HEADLINE_FILTER: deepcopy(DUMMY_DATA[CONF_HEADLINE_FILTER]),
CONF_AREA_FILTER: deepcopy(DUMMY_DATA[CONF_AREA_FILTER]),
CONF_MESSAGE_SLOTS: deepcopy(DUMMY_DATA[CONF_MESSAGE_SLOTS]),
CONST_REGION_A_TO_D: ["072350000000_1"],
CONST_REGION_E_TO_H: [],
@@ -227,8 +214,6 @@ async def test_options_flow_with_no_selection(hass: HomeAssistant) -> None:
domain=DOMAIN,
title="NINA",
data=deepcopy(DUMMY_DATA),
version=1,
minor_version=3,
)
config_entry.add_to_hass(hass)

@@ -256,7 +241,7 @@ async def test_options_flow_with_no_selection(hass: HomeAssistant) -> None:
CONST_REGION_M_TO_Q: [],
CONST_REGION_R_TO_U: [],
CONST_REGION_V_TO_Z: [],
CONF_FILTERS: {CONF_HEADLINE_FILTER: ""},
CONF_HEADLINE_FILTER: "",
},
)

@@ -271,8 +256,6 @@ async def test_options_flow_connection_error(hass: HomeAssistant) -> None:
domain=DOMAIN,
title="NINA",
data=deepcopy(DUMMY_DATA),
version=1,
minor_version=3,
)
config_entry.add_to_hass(hass)

@@ -301,8 +284,6 @@ async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None:
domain=DOMAIN,
title="NINA",
data=deepcopy(DUMMY_DATA),
version=1,
minor_version=3,
)
config_entry.add_to_hass(hass)

@@ -334,8 +315,6 @@ async def test_options_flow_entity_removal(
domain=DOMAIN,
title="NINA",
data=deepcopy(DUMMY_DATA) | {CONF_REGIONS: {"095760000000": "Aach"}},
version=1,
minor_version=3,
)
config_entry.add_to_hass(hass)

@@ -360,7 +339,6 @@ async def test_options_flow_entity_removal(
CONST_REGION_M_TO_Q: [],
CONST_REGION_R_TO_U: [],
CONST_REGION_V_TO_Z: [],
CONF_FILTERS: {},
},
)

@@ -16,11 +16,8 @@ from tests.typing import ClientSessionGenerator

ENTRY_DATA: dict[str, Any] = {
"slots": 5,
"corona_filter": True,
"regions": {"083350000000": "Aach, Stadt"},
"filters": {
"headline_filter": ".*corona.*",
"area_filter": ".*",
},
}


@@ -36,7 +33,7 @@ async def test_diagnostics(
wraps=mocked_request_function,
):
config_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=ENTRY_DATA, version=1, minor_version=3
domain=DOMAIN, title="NINA", data=ENTRY_DATA
)

config_entry.add_to_hass(hass)

@@ -16,11 +16,9 @@ from tests.common import MockConfigEntry

ENTRY_DATA: dict[str, Any] = {
"slots": 5,
"headline_filter": ".*corona.*",
"area_filter": ".*",
"regions": {"083350000000": "Aach, Stadt"},
"filters": {
"headline_filter": ".*corona.*",
"area_filter": ".*",
},
}


@@ -32,7 +30,7 @@ async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
wraps=mocked_request_function,
):
entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=ENTRY_DATA, version=1, minor_version=3
domain=DOMAIN, title="NINA", data=ENTRY_DATA
)
entry.add_to_hass(hass)

@@ -41,8 +39,9 @@ async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
return entry


async def test_config_migration_from1_1(hass: HomeAssistant) -> None:
async def test_config_migration(hass: HomeAssistant) -> None:
"""Test the migration to a new configuration layout."""

old_entry_data: dict[str, Any] = {
"slots": 5,
"corona_filter": True,
@@ -50,70 +49,15 @@ async def test_config_migration_from1_1(hass: HomeAssistant) -> None:
}

old_conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=old_entry_data, version=1
domain=DOMAIN, title="NINA", data=old_entry_data
)

with patch(
"pynina.baseApi.BaseAPI._makeRequest",
wraps=mocked_request_function,
):
old_conf_entry.add_to_hass(hass)
old_conf_entry.add_to_hass(hass)

await hass.config_entries.async_setup(old_conf_entry.entry_id)
await hass.async_block_till_done()
await hass.config_entries.async_setup(old_conf_entry.entry_id)
await hass.async_block_till_done()

assert dict(old_conf_entry.data) == ENTRY_DATA
assert old_conf_entry.state is ConfigEntryState.LOADED
assert old_conf_entry.version == 1
assert old_conf_entry.minor_version == 3


async def test_config_migration_from1_2(hass: HomeAssistant) -> None:
"""Test the migration to a new configuration layout with sections."""
old_entry_data: dict[str, Any] = {
"slots": 5,
"headline_filter": ".*corona.*",
"area_filter": ".*",
"regions": {"083350000000": "Aach, Stadt"},
}

old_conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=old_entry_data, version=1, minor_version=2
)

with patch(
"pynina.baseApi.BaseAPI._makeRequest",
wraps=mocked_request_function,
):
old_conf_entry.add_to_hass(hass)

await hass.config_entries.async_setup(old_conf_entry.entry_id)
await hass.async_block_till_done()

assert dict(old_conf_entry.data) == ENTRY_DATA
assert old_conf_entry.state is ConfigEntryState.LOADED
assert old_conf_entry.version == 1
assert old_conf_entry.minor_version == 3


async def test_config_migration_downgrade(hass: HomeAssistant) -> None:
"""Test the migration to an old version."""

conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=ENTRY_DATA, version=2
)

with patch(
"pynina.baseApi.BaseAPI._makeRequest",
wraps=mocked_request_function,
):
conf_entry.add_to_hass(hass)

await hass.config_entries.async_setup(conf_entry.entry_id)
await hass.async_block_till_done()

assert dict(conf_entry.data) == ENTRY_DATA
assert conf_entry.state is ConfigEntryState.MIGRATION_ERROR
assert dict(old_conf_entry.data) == ENTRY_DATA


async def test_config_entry_not_ready(hass: HomeAssistant) -> None:
@@ -130,7 +74,7 @@ async def test_sensors_connection_error(hass: HomeAssistant) -> None:
side_effect=ApiError("Could not connect to Api"),
):
conf_entry: MockConfigEntry = MockConfigEntry(
domain=DOMAIN, title="NINA", data=ENTRY_DATA, version=1, minor_version=3
domain=DOMAIN, title="NINA", data=ENTRY_DATA
)

conf_entry.add_to_hass(hass)

@@ -864,7 +864,7 @@ async def test_translated_unit(
"""Test translated unit."""

with patch(
"homeassistant.helpers.entity_platform.translation.async_get_translations",
"homeassistant.helpers.service.translation.async_get_translations",
return_value={
"component.test.entity.number.test_translation_key.unit_of_measurement": "Tests"
},
@@ -896,7 +896,7 @@ async def test_translated_unit_with_native_unit_raises(
"""Test that translated unit."""

with patch(
"homeassistant.helpers.entity_platform.translation.async_get_translations",
"homeassistant.helpers.service.translation.async_get_translations",
return_value={
"component.test.entity.number.test_translation_key.unit_of_measurement": "Tests"
},

@@ -627,6 +627,9 @@ async def test_service_descriptions(hass: HomeAssistant) -> None:

assert descriptions[DOMAIN]["test_name"]["name"] == "ABC"

# Test 4: verify that names from YAML are taken into account as well
assert descriptions[DOMAIN]["turn_on"]["name"] == "Turn on"


async def test_shared_context(hass: HomeAssistant) -> None:
"""Test that the shared context is passed down the chain."""

@@ -601,7 +601,7 @@ async def test_translated_unit(
"""Test translated unit."""

with patch(
"homeassistant.helpers.entity_platform.translation.async_get_translations",
"homeassistant.helpers.service.translation.async_get_translations",
return_value={
"component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests"
},
@@ -633,7 +633,7 @@ async def test_translated_unit_with_native_unit_raises(
"""Test that translated unit."""

with patch(
"homeassistant.helpers.entity_platform.translation.async_get_translations",
"homeassistant.helpers.service.translation.async_get_translations",
return_value={
"component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests"
},
@@ -664,7 +664,7 @@ async def test_unit_translation_key_without_platform_raises(
"""Test that unit translation key property raises if the entity has no platform yet."""

with patch(
"homeassistant.helpers.entity_platform.translation.async_get_translations",
"homeassistant.helpers.service.translation.async_get_translations",
return_value={
"component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests"
},

@@ -1,436 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"ble": {
|
||||
"enable": false,
|
||||
"rpc": {
|
||||
"enable": false
|
||||
}
|
||||
},
|
||||
"boolean:200": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 200,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 101
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 1",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone0"
|
||||
},
|
||||
"boolean:201": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 201,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 102
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 2",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone1"
|
||||
},
|
||||
"boolean:202": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 202,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 103
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 3",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone2"
|
||||
},
|
||||
"boolean:203": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 203,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 104
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 4",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone3"
|
||||
},
|
||||
"boolean:204": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 204,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 105
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 5",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone4"
|
||||
},
|
||||
"boolean:205": {
|
||||
"access": "crw",
|
||||
"default_value": false,
|
||||
"id": 205,
|
||||
"meta": {
|
||||
"cloud": ["log"],
|
||||
"svc": {
|
||||
"dpId": 106
|
||||
},
|
||||
"ui": {
|
||||
"view": "toggle"
|
||||
}
|
||||
},
|
||||
"name": "Zone 6",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "zone5"
|
||||
},
|
||||
"bthome": {},
|
||||
"cloud": {
|
||||
"enable": false,
|
||||
"server": "wss://repo.shelly.cloud:6022/jrpc"
|
||||
},
|
||||
"enum:200": {
|
||||
"access": "crw",
|
||||
"default_value": "none",
|
||||
"id": 200,
|
||||
"meta": {
|
||||
"ui": {
|
||||
"titles": {
|
||||
"none": "None",
|
||||
"seq_0": "All"
|
||||
},
|
||||
"view": "select"
|
||||
}
|
||||
},
|
||||
"name": "Active Sequence",
|
||||
"options": ["none", "seq_0"],
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "active_sequence"
|
||||
},
|
||||
"mqtt": {
|
||||
"client_id": "irrigation-aabbccddeeff",
|
||||
"enable": false,
|
||||
"enable_control": true,
|
||||
"enable_rpc": true,
|
||||
"rpc_ntf": true,
|
||||
"server": "mqtt.test.server",
|
||||
"ssl_ca": null,
|
||||
"status_ntf": false,
|
||||
"topic_prefix": "irrigation-aabbccddeeff",
|
||||
"use_client_cert": false,
|
||||
"user": null
|
||||
},
|
||||
"number:200": {
|
||||
"access": "cr",
|
||||
"default_value": 0,
|
||||
"id": 200,
|
||||
"max": 100,
|
||||
"meta": {
|
||||
"cloud": ["measurement", "log"],
|
||||
"ui": {
|
||||
"unit": "°C",
|
||||
"view": "label"
|
||||
}
|
||||
},
|
||||
"min": -50,
|
||||
"name": "Average Temperature",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "average_temperature"
|
||||
},
|
||||
"number:201": {
|
||||
"access": "cr",
|
||||
"default_value": 0,
|
||||
"id": 201,
|
||||
"max": 99999,
|
||||
"meta": {
|
||||
"cloud": ["measurement", "log"],
|
||||
"ui": {
|
||||
"unit": "mm/m2",
|
||||
"view": "label"
|
||||
}
|
||||
},
|
||||
"min": 0,
|
||||
"name": "Rainfall last 24h",
|
||||
"owner": "service:0",
|
||||
"persisted": false,
|
||||
"role": "last_precipitation"
|
||||
},
|
||||
"object:200": {
|
||||
"access": "crw",
|
||||
"id": 200,
|
||||
"meta": null,
|
||||
"name": "Zones status",
|
||||
"owner": "service:0",
|
||||
"role": "zones_status"
|
||||
},
|
||||
"service:0": {
|
||||
"base_temp": 20,
|
||||
"duration_offset": 0,
|
||||
"id": 0,
|
||||
"limit_zones": 2,
|
||||
"seq_rev": 11,
|
||||
"weather_api": true,
|
||||
"zones": [
|
||||
{
|
||||
"duration": 10,
|
||||
"name": "Zone Name 1",
|
||||
"water_amount": 10
|
||||
},
|
||||
{
|
||||
"duration": 11,
|
||||
"name": "Zone Name 2",
|
||||
"water_amount": 10
|
||||
},
|
||||
{
|
||||
"duration": 10,
|
||||
"name": "Zone Name 3",
|
||||
"water_amount": 10
|
||||
},
|
||||
{
|
||||
"duration": 10,
|
||||
"name": null,
|
||||
"water_amount": 10
|
||||
},
|
||||
{
|
||||
"duration": 10,
|
||||
"name": null,
|
||||
"water_amount": 10
|
||||
},
|
||||
{
|
||||
"duration": 10,
|
||||
"name": null,
|
||||
"water_amount": 10
|
||||
}
|
||||
]
|
||||
},
|
||||
"sys": {
|
||||
"cfg_rev": 46,
|
||||
"debug": {
|
||||
"file_level": null,
|
||||
"level": 2,
|
||||
"mqtt": {
|
||||
"enable": false
|
||||
},
|
||||
"udp": {
|
||||
"addr": null
|
||||
},
|
||||
"websocket": {
|
||||
"enable": false
|
||||
}
|
||||
},
|
||||
"device": {
|
||||
"discoverable": true,
|
||||
"eco_mode": false,
|
||||
"fw_id": "20241121-103618/1.4.99-xt-prod1-gc6448fb",
|
||||
"mac": "AABBCCDDEEFF",
|
||||
"name": "Test Name"
|
||||
},
|
||||
"location": {
|
||||
"lat": 32.1772,
|
||||
"lon": 34.9039,
|
||||
"tz": "Asia/Tel_Aviv"
|
||||
},
|
||||
"rpc_udp": {
|
||||
"dst_addr": null,
|
||||
"listen_port": null
|
||||
},
|
||||
"sntp": {
|
||||
"server": "sntp.test.server"
|
||||
},
|
||||
"ui_data": {}
|
||||
},
|
||||
"wifi": {
|
||||
"sta": {
|
||||
"ssid": "Wifi-Network-Name",
|
||||
"is_open": false,
|
||||
"enable": true,
|
||||
"ipv4mode": "dhcp",
|
||||
"ip": null,
|
||||
"netmask": null,
|
||||
"gw": null,
|
||||
"nameserver": null
|
||||
}
|
||||
},
|
||||
"ws": {
|
||||
"enable": false,
|
||||
"server": null,
|
||||
"ssl_ca": "ca.pem"
|
||||
}
|
||||
},
|
||||
"shelly": {
|
||||
"app": "XT1",
|
||||
"auth_domain": null,
|
||||
"auth_en": false,
|
||||
"fw_id": "20241121-103618/1.4.99-xt-prod1-gc6448fb",
|
||||
"gen": 3,
|
||||
"id": "irrigation-aabbccddeeff",
|
||||
"jti": "0000C100000A",
|
||||
"jwt": {
|
||||
"aud": "XT1",
|
||||
"f": 1,
|
||||
"iat": 1739274795,
|
||||
"jti": "0000C100000A",
|
||||
"n": "Irrigation controller FK-06X",
|
||||
"p": "Irrigation",
|
||||
"url": "https://frankever.com/",
|
||||
"v": 1,
|
||||
"xt1": {
|
||||
"svc0": {
|
||||
"type": "irrigation-controller"
|
||||
}
|
||||
}
|
||||
},
|
||||
"mac": "AABBCCDDEEFF",
|
||||
"model": "S3XT-0S",
|
||||
"name": "Test Name",
|
||||
"slot": 0,
|
||||
"svc0": {
|
||||
"build_id": "20250305-144715/c28f621",
|
||||
"type": "irrigation-controller",
|
||||
"ver": "0.7.0-irrigation-prod0"
|
||||
},
|
||||
"ver": "1.4.99-xt-prod1"
|
||||
},
|
||||
"status": {
|
||||
"ble": {},
|
||||
"boolean:200": {
|
||||
"value": false
|
||||
},
|
||||
"boolean:201": {
|
||||
"value": false
|
||||
},
|
||||
"boolean:202": {
|
||||
"value": false
|
||||
},
|
||||
"boolean:203": {
|
||||
"value": false
|
||||
},
|
||||
"boolean:204": {
|
||||
"value": false
|
||||
},
|
||||
"boolean:205": {
|
||||
"value": false
|
||||
},
|
||||
"bthome": {
|
||||
"errors": ["bluetooth_disabled"]
|
||||
},
|
||||
"cloud": {
|
||||
"connected": false
|
||||
},
|
||||
"enum:200": {
|
||||
"value": "none"
|
||||
},
|
||||
"mqtt": {
|
||||
"connected": false
|
||||
},
|
||||
"number:200": {
|
||||
"value": 0
|
||||
},
|
||||
"number:201": {
|
||||
"value": 0
|
||||
},
|
||||
"object:200": {
|
||||
"value": {
|
||||
"zone0": {
|
||||
"duration": 10,
|
||||
"source": "rpc",
|
||||
"started_at": 1757947186153.368
|
||||
},
|
||||
"zone1": {
|
||||
"duration": 11,
|
||||
"source": "rpc",
|
||||
"started_at": 1757947188242.7039
|
||||
},
|
||||
"zone2": {
|
||||
"duration": 10,
|
||||
"source": "init",
|
||||
"started_at": null
|
||||
},
|
||||
"zone3": {
|
||||
"duration": 10,
|
||||
"source": "init",
|
||||
"started_at": null
|
||||
},
|
||||
"zone4": {
|
||||
"duration": 10,
|
||||
"source": "init",
|
||||
"started_at": null
|
||||
},
|
||||
"zone5": {
|
||||
"duration": 10,
|
||||
"source": "rpc",
|
||||
"started_at": 1757947190878.225
|
||||
}
|
||||
}
|
||||
},
|
||||
"service:0": {
|
||||
"etag": "56436ed1f373df7e91e8c79588ccbcb0",
|
||||
"state": "running",
|
||||
"stats": {
|
||||
"mem": 1363,
|
||||
"mem_peak": 1590
|
||||
}
|
||||
},
|
||||
"sys": {
|
||||
"available_updates": {},
|
||||
"btrelay_rev": 0,
|
||||
"cfg_rev": 46,
|
||||
"fs_free": 565248,
|
||||
"fs_size": 1048576,
|
||||
"kvs_rev": 1,
|
||||
"last_sync_ts": 1757950062,
|
||||
"mac": "AABBCCDDEEFF",
|
||||
"ram_free": 103824,
|
||||
"ram_min_free": 83204,
|
||||
"ram_size": 252672,
|
||||
"reset_reason": 1,
|
||||
"restart_required": false,
|
||||
"schedule_rev": 17,
|
||||
"time": "18:32",
|
||||
"unixtime": 1757950362,
|
||||
"uptime": 1080367,
|
||||
"webhook_rev": 1
|
||||
},
|
||||
"wifi": {
|
||||
"rssi": -38,
|
||||
"ssid": "Wifi-Network-Name",
|
||||
"sta_ip": "192.168.2.24",
|
||||
"status": "got ip"
|
||||
},
|
||||
"ws": {
|
||||
"connected": false
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -536,65 +536,6 @@
|
||||
'state': '5.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_rpc_switch_energy_sensors[sensor.test_name_test_switch_0_energy-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.test_name_test_switch_0_energy',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 2,
|
||||
}),
|
||||
'sensor.private': dict({
|
||||
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'test switch_0 energy',
|
||||
'platform': 'shelly',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '123456789ABC-switch:0-energy',
|
||||
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_rpc_switch_energy_sensors[sensor.test_name_test_switch_0_energy-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'energy',
|
||||
'friendly_name': 'Test name test switch_0 energy',
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_name_test_switch_0_energy',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '1234.56789',
|
||||
})
|
||||
# ---
|
||||
# name: test_rpc_switch_energy_sensors[sensor.test_name_test_switch_0_energy_consumed-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -713,12 +654,14 @@
|
||||
'state': '98.76543',
|
||||
})
|
||||
# ---
|
||||
# name: test_shelly_irrigation_weather_sensors[sensor.test_name_average_temperature-entry]
|
||||
# name: test_rpc_switch_energy_sensors[sensor.test_name_test_switch_0_energy-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
@@ -726,7 +669,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.test_name_average_temperature',
|
||||
'entity_id': 'sensor.test_name_test_switch_0_energy',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -736,86 +679,37 @@
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
'suggested_display_precision': 2,
|
||||
}),
|
||||
'sensor.private': dict({
|
||||
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Average Temperature',
|
||||
'original_name': 'test switch_0 energy',
|
||||
'platform': 'shelly',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '123456789ABC-number:200-number_average_temperature',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
'unique_id': '123456789ABC-switch:0-energy',
|
||||
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_shelly_irrigation_weather_sensors[sensor.test_name_average_temperature-state]
|
||||
# name: test_rpc_switch_energy_sensors[sensor.test_name_test_switch_0_energy-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Test name Average Temperature',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
'device_class': 'energy',
|
||||
'friendly_name': 'Test name test switch_0 energy',
|
||||
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_name_average_temperature',
|
||||
'entity_id': 'sensor.test_name_test_switch_0_energy',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '0',
|
||||
})
|
||||
# ---
|
||||
# name: test_shelly_irrigation_weather_sensors[sensor.test_name_rainfall_last_24h-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.test_name_rainfall_last_24h',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 0,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.PRECIPITATION: 'precipitation'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Rainfall last 24h',
|
||||
'platform': 'shelly',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '123456789ABC-number:201-number_last_precipitation',
|
||||
'unit_of_measurement': <UnitOfPrecipitationDepth.MILLIMETERS: 'mm'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_shelly_irrigation_weather_sensors[sensor.test_name_rainfall_last_24h-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'precipitation',
|
||||
'friendly_name': 'Test name Rainfall last 24h',
|
||||
'unit_of_measurement': <UnitOfPrecipitationDepth.MILLIMETERS: 'mm'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.test_name_rainfall_last_24h',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '0',
|
||||
'state': '1234.56789',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -53,11 +53,7 @@ from . import (
register_entity,
)

from tests.common import (
async_fire_time_changed,
async_load_json_object_fixture,
mock_restore_cache_with_extra_data,
)
from tests.common import async_fire_time_changed, mock_restore_cache_with_extra_data

RELAY_BLOCK_ID = 0
SENSOR_BLOCK_ID = 3
@@ -1998,39 +1994,3 @@ async def test_cury_sensor_entity(

entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")


async def test_shelly_irrigation_weather_sensors(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test Shelly Irrigation controller FK-06X weather sensors."""
device_fixture = await async_load_json_object_fixture(
hass, "fk-06x_gen3_irrigation.json", DOMAIN
)
monkeypatch.setattr(mock_rpc_device, "shelly", device_fixture["shelly"])
monkeypatch.setattr(mock_rpc_device, "status", device_fixture["status"])
monkeypatch.setattr(mock_rpc_device, "config", device_fixture["config"])

config_entry = await init_integration(hass, gen=3)

for entity in ("average_temperature", "rainfall_last_24h"):
entity_id = f"{SENSOR_DOMAIN}.test_name_{entity}"

state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")

entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")

# weather api disabled
monkeypatch.setitem(mock_rpc_device.config["service:0"], "weather_api", False)
await hass.config_entries.async_reload(config_entry.entry_id)
await hass.async_block_till_done()

for entity in ("average_temperature", "rainfall_last_24h"):
entity_id = f"{SENSOR_DOMAIN}.test_name_{entity}"
assert hass.states.get(entity_id) is None

@@ -228,90 +228,6 @@ async def test_reload_template_when_blueprint_changes(hass: HomeAssistant) -> No
|
||||
assert not_inverted.state == "on"
|
||||
|
||||
|
||||
async def test_init_attribute_variables_from_blueprint(hass: HomeAssistant) -> None:
|
||||
"""Test a state based blueprint initializes icon, name, and picture with variables."""
|
||||
blueprint = "test_init_attribute_variables.yaml"
|
||||
source = "switch.foo"
|
||||
entity_id = "sensor.foo"
|
||||
hass.states.async_set(source, "on", {"friendly_name": "Foo"})
|
||||
config = {
|
||||
DOMAIN: [
|
||||
{
|
||||
"use_blueprint": {
|
||||
"path": blueprint,
|
||||
"input": {"switch": source},
|
||||
},
|
||||
}
|
||||
],
|
||||
}
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
config,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Check initial state
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "True"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb"
|
||||
assert sensor.attributes["entity_picture"] == "on.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
assert sensor.attributes["extra"] == "ab"
|
||||
|
||||
hass.states.async_set(source, "off", {"friendly_name": "Foo"})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Check to see that the template light works
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "False"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb-off"
|
||||
assert sensor.attributes["entity_picture"] == "off.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
assert sensor.attributes["extra"] == "ab"
|
||||
|
||||
# Reload the templates without any change, but with updated blueprint
|
||||
blueprint_config = yaml_util.load_yaml(
|
||||
pathlib.Path("tests/testing_config/blueprints/template/") / blueprint
|
||||
)
|
||||
blueprint_config["variables"]["extraa"] = "c"
|
||||
blueprint_config["sensor"]["variables"]["extrab"] = "d"
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.config.load_yaml_config_file",
|
||||
autospec=True,
|
||||
return_value=config,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.blueprint.models.yaml_util.load_yaml_dict",
|
||||
autospec=True,
|
||||
return_value=blueprint_config,
|
||||
),
|
||||
):
|
||||
await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True)
|
||||
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "False"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb-off"
|
||||
assert sensor.attributes["entity_picture"] == "off.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
assert sensor.attributes["extra"] == "cd"
|
||||
|
||||
hass.states.async_set(source, "on", {"friendly_name": "Foo"})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "True"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb"
|
||||
assert sensor.attributes["entity_picture"] == "on.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
assert sensor.attributes["extra"] == "cd"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("blueprint"),
|
||||
["test_event_sensor.yaml", "test_event_sensor_legacy_schema.yaml"],
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
import pytest
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.template import DOMAIN
|
||||
from homeassistant.components.template.config import (
|
||||
CONFIG_SECTION_SCHEMA,
|
||||
async_validate_config_section,
|
||||
@@ -13,7 +12,6 @@ from homeassistant.components.template.config import (
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.script_variables import ScriptVariables
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -258,59 +256,3 @@ async def test_combined_trigger_variables(
|
||||
assert root_variables.as_dict() == expected_root
|
||||
variables: ScriptVariables = validated["binary_sensor"][0].get("variables", empty)
|
||||
assert variables.as_dict() == expected_entity
|
||||
|
||||
|
||||
async def test_state_init_attribute_variables(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Test a state based template entity initializes icon, name, and picture with variables."""
|
||||
source = "switch.foo"
|
||||
entity_id = "sensor.foo"
|
||||
|
||||
hass.states.async_set(source, "on", {"friendly_name": "Foo"})
|
||||
config = {
|
||||
"template": [
|
||||
{
|
||||
"variables": {
|
||||
"switch": "switch.foo",
|
||||
"on_icon": "mdi:lightbulb",
|
||||
"on_picture": "on.png",
|
||||
},
|
||||
"sensor": {
|
||||
"variables": {
|
||||
"off_icon": "mdi:lightbulb-off",
|
||||
"off_picture": "off.png",
|
||||
},
|
||||
"name": "{{ state_attr(switch, 'friendly_name') }}",
|
||||
"icon": "{{ on_icon if is_state(switch, 'on') else off_icon }}",
|
||||
"picture": "{{ on_picture if is_state(switch, 'on') else off_picture }}",
|
||||
"state": "{{ is_state(switch, 'on') }}",
|
||||
},
|
||||
}
|
||||
],
|
||||
}
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
config,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Check initial state
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "True"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb"
|
||||
assert sensor.attributes["entity_picture"] == "on.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
|
||||
hass.states.async_set(source, "off", {"friendly_name": "Foo"})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Check to see that the template light works
|
||||
sensor = hass.states.get(entity_id)
|
||||
assert sensor
|
||||
assert sensor.state == "False"
|
||||
assert sensor.attributes["icon"] == "mdi:lightbulb-off"
|
||||
assert sensor.attributes["entity_picture"] == "off.png"
|
||||
assert sensor.attributes["friendly_name"] == "Foo"
|
||||
|
||||
@@ -3475,62 +3475,6 @@
'state': '37',
})
# ---
# name: test_all_entities[sensor.model9_supply_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.model9_supply_temperature',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 1,
}),
}),
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
'original_icon': None,
'original_name': 'Supply temperature',
'platform': 'vicare',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'supply_temperature',
'unique_id': 'gateway9_zigbee_################-supply_temperature',
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
})
# ---
# name: test_all_entities[sensor.model9_supply_temperature-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'temperature',
'friendly_name': 'model9 Supply temperature',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
}),
'context': <ANY>,
'entity_id': 'sensor.model9_supply_temperature',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '31.0',
})
# ---
# name: test_all_entities[sensor.vitovalor_hydraulic_separator_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

@@ -1,44 +0,0 @@
"""Test the Victron Remote Monitoring energy platform."""

from homeassistant.components.victron_remote_monitoring import energy
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_energy_solar_forecast(
hass: HomeAssistant, init_integration: MockConfigEntry
) -> None:
"""Test fetching the solar forecast for the energy dashboard."""
config_entry = init_integration

assert config_entry.state is ConfigEntryState.LOADED

assert await energy.async_get_solar_forecast(hass, config_entry.entry_id) == {
"wh_hours": {
"2025-04-23T10:00:00+00:00": 5050.1,
"2025-04-23T11:00:00+00:00": 5000.2,
"2025-04-24T10:00:00+00:00": 2250.3,
"2025-04-24T11:00:00+00:00": 2000.4,
"2025-04-25T10:00:00+00:00": 1000.5,
"2025-04-25T11:00:00+00:00": 500.6,
}
}


async def test_energy_missing_entry(hass: HomeAssistant) -> None:
"""Return None when config entry cannot be found."""
assert await energy.async_get_solar_forecast(hass, "missing") is None


async def test_energy_no_solar_data(
hass: HomeAssistant, init_integration: MockConfigEntry
) -> None:
"""Return None when the coordinator has no solar forecast data."""
config_entry = init_integration
assert config_entry.state is ConfigEntryState.LOADED

config_entry.runtime_data.data.solar = None

assert await energy.async_get_solar_forecast(hass, config_entry.entry_id) is None
@@ -2,13 +2,18 @@
|
||||
# name: test_get_services
|
||||
dict({
|
||||
'reload': dict({
|
||||
'description': 'Reloads group configuration, entities, and notify services from YAML-configuration.',
|
||||
'fields': dict({
|
||||
}),
|
||||
'name': 'Reload',
|
||||
}),
|
||||
'remove': dict({
|
||||
'description': 'Removes a group.',
|
||||
'fields': dict({
|
||||
'object_id': dict({
|
||||
'description': 'Object ID of this group. This object ID is used as part of the entity ID. Entity ID format: [domain].[object_id].',
|
||||
'example': 'test_group',
|
||||
'name': 'Object ID',
|
||||
'required': True,
|
||||
'selector': dict({
|
||||
'object': dict({
|
||||
@@ -17,11 +22,15 @@
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': 'Remove',
|
||||
}),
|
||||
'set': dict({
|
||||
'description': 'Creates/Updates a group.',
|
||||
'fields': dict({
|
||||
'add_entities': dict({
|
||||
'description': 'List of members to be added to the group. Cannot be used in combination with `Entities` or `Remove entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Add entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -30,13 +39,17 @@
|
||||
}),
|
||||
}),
|
||||
'all': dict({
|
||||
'description': 'Enable this option if the group should only be used when all entities are in state `on`.',
|
||||
'name': 'All',
|
||||
'selector': dict({
|
||||
'boolean': dict({
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'entities': dict({
|
||||
'description': 'List of all members in the group. Cannot be used in combination with `Add entities` or `Remove entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -45,14 +58,18 @@
|
||||
}),
|
||||
}),
|
||||
'icon': dict({
|
||||
'description': 'Name of the icon for the group.',
|
||||
'example': 'mdi:camera',
|
||||
'name': 'Icon',
|
||||
'selector': dict({
|
||||
'icon': dict({
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': dict({
|
||||
'description': 'Name of the group.',
|
||||
'example': 'My test group',
|
||||
'name': 'Name',
|
||||
'selector': dict({
|
||||
'text': dict({
|
||||
'multiline': False,
|
||||
@@ -61,7 +78,9 @@
|
||||
}),
|
||||
}),
|
||||
'object_id': dict({
|
||||
'description': 'Object ID of this group. This object ID is used as part of the entity ID. Entity ID format: [domain].[object_id].',
|
||||
'example': 'test_group',
|
||||
'name': 'Object ID',
|
||||
'required': True,
|
||||
'selector': dict({
|
||||
'text': dict({
|
||||
@@ -71,7 +90,9 @@
|
||||
}),
|
||||
}),
|
||||
'remove_entities': dict({
|
||||
'description': 'List of members to be removed from a group. Cannot be used in combination with `Entities` or `Add entities`.',
|
||||
'example': 'domain.entity_id1, domain.entity_id2',
|
||||
'name': 'Remove entities',
|
||||
'selector': dict({
|
||||
'entity': dict({
|
||||
'multiple': True,
|
||||
@@ -80,6 +101,7 @@
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
'name': 'Set',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
@@ -110,8 +132,10 @@
|
||||
'name': 'Translated name',
|
||||
}),
|
||||
'set_level': dict({
|
||||
'description': '',
|
||||
'fields': dict({
|
||||
}),
|
||||
'name': '',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -784,6 +784,10 @@ async def test_get_services(
"homeassistant.helpers.service._load_services_file",
side_effect=_load_services_file,
),
patch(
"homeassistant.helpers.service.translation.async_get_translations",
return_value={},
),
):
await websocket_client.send_json_auto_id({"type": "get_services"})
msg = await websocket_client.receive_json()

@@ -4,7 +4,7 @@ from unittest import mock
from unittest.mock import Mock

import pytest
from whirlpool import aircon, appliancesmanager, auth, dryer, oven, washer
from whirlpool import aircon, appliancesmanager, auth, dryer, washer
from whirlpool.backendselector import Brand, Region

from .const import MOCK_SAID1, MOCK_SAID2
@@ -49,12 +49,7 @@ def fixture_mock_auth_api():

@pytest.fixture(name="mock_appliances_manager_api", autouse=True)
def fixture_mock_appliances_manager_api(
mock_aircon1_api,
mock_aircon2_api,
mock_washer_api,
mock_dryer_api,
mock_oven_single_cavity_api,
mock_oven_dual_cavity_api,
mock_aircon1_api, mock_aircon2_api, mock_washer_api, mock_dryer_api
):
"""Set up AppliancesManager fixture."""
with (
@@ -73,10 +68,6 @@ def fixture_mock_appliances_manager_api(
]
mock_appliances_manager.return_value.washers = [mock_washer_api]
mock_appliances_manager.return_value.dryers = [mock_dryer_api]
mock_appliances_manager.return_value.ovens = [
mock_oven_single_cavity_api,
mock_oven_dual_cavity_api,
]
yield mock_appliances_manager


@@ -164,42 +155,3 @@ def mock_dryer_api():
mock_dryer.get_time_remaining.return_value = 3540
mock_dryer.get_cycle_status_sensing.return_value = False
return mock_dryer


@pytest.fixture
def mock_oven_single_cavity_api():
"""Get a mock of a single cavity oven."""
mock_oven = Mock(spec=oven.Oven, said="said_oven_single")
mock_oven.name = "Single Cavity Oven"
mock_oven.appliance_info = Mock(
data_model="oven", category="oven", model_number="12345"
)
mock_oven.get_cavity_state.return_value = oven.CavityState.Standby
mock_oven.get_cook_mode.return_value = oven.CookMode.Bake
mock_oven.get_online.return_value = True
mock_oven.get_oven_cavity_exists.side_effect = (
lambda cavity: cavity == oven.Cavity.Upper
)
mock_oven.get_temp.return_value = 180
mock_oven.get_target_temp.return_value = 200
return mock_oven


@pytest.fixture
def mock_oven_dual_cavity_api():
"""Get a mock of a dual cavity oven."""
mock_oven = Mock(spec=oven.Oven, said="said_oven_dual")
mock_oven.name = "Dual Cavity Oven"
mock_oven.appliance_info = Mock(
data_model="oven", category="oven", model_number="12345"
)
mock_oven.get_cavity_state.return_value = oven.CavityState.Standby
mock_oven.get_cook_mode.return_value = oven.CookMode.Bake
mock_oven.get_online.return_value = True
mock_oven.get_oven_cavity_exists.side_effect = lambda cavity: cavity in (
oven.Cavity.Upper,
oven.Cavity.Lower,
)
mock_oven.get_temp.return_value = 180
mock_oven.get_target_temp.return_value = 200
return mock_oven

@@ -22,16 +22,6 @@
}),
}),
'ovens': dict({
'Dual Cavity Oven': dict({
'category': 'oven',
'data_model': 'oven',
'model_number': '12345',
}),
'Single Cavity Oven': dict({
'category': 'oven',
'data_model': 'oven',
'model_number': '12345',
}),
}),
'washers': dict({
'Washer': dict({

@@ -145,732 +145,6 @@
|
||||
'state': 'running_maincycle',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_cook_mode-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'standby',
|
||||
'bake',
|
||||
'convection_bake',
|
||||
'broil',
|
||||
'convection_broil',
|
||||
'convection_roast',
|
||||
'keep_warm',
|
||||
'air_fry',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.dual_cavity_oven_lower_oven_cook_mode',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Lower oven cook mode',
|
||||
'platform': 'whirlpool',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'oven_cook_mode_lower',
|
||||
'unique_id': 'said_oven_dual-oven_cook_mode_lower',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_cook_mode-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Dual cavity oven Lower oven cook mode',
|
||||
'options': list([
|
||||
'standby',
|
||||
'bake',
|
||||
'convection_bake',
|
||||
'broil',
|
||||
'convection_broil',
|
||||
'convection_roast',
|
||||
'keep_warm',
|
||||
'air_fry',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.dual_cavity_oven_lower_oven_cook_mode',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'bake',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_current_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.dual_cavity_oven_lower_oven_current_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Lower oven current temperature',
|
||||
'platform': 'whirlpool',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'oven_current_temperature_lower',
|
||||
'unique_id': 'said_oven_dual-oven_current_temperature_lower',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_current_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Dual cavity oven Lower oven current temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.dual_cavity_oven_lower_oven_current_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '180',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_state-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'standby',
|
||||
'preheating',
|
||||
'cooking',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_lower_oven_state',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Lower oven state',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_state_lower',
    'unique_id': 'said_oven_dual-oven_state_lower',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_state-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'enum',
      'friendly_name': 'Dual cavity oven Lower oven state',
      'options': list([
        'standby',
        'preheating',
        'cooking',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_lower_oven_state',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'standby',
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_target_temperature-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_lower_oven_target_temperature',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
    'original_icon': None,
    'original_name': 'Lower oven target temperature',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_target_temperature_lower',
    'unique_id': 'said_oven_dual-oven_target_temperature_lower',
    'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_lower_oven_target_temperature-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'temperature',
      'friendly_name': 'Dual cavity oven Lower oven target temperature',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_lower_oven_target_temperature',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '200',
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_cook_mode-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'standby',
        'bake',
        'convection_bake',
        'broil',
        'convection_broil',
        'convection_roast',
        'keep_warm',
        'air_fry',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_cook_mode',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Upper oven cook mode',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_cook_mode_upper',
    'unique_id': 'said_oven_dual-oven_cook_mode_upper',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_cook_mode-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'enum',
      'friendly_name': 'Dual cavity oven Upper oven cook mode',
      'options': list([
        'standby',
        'bake',
        'convection_bake',
        'broil',
        'convection_broil',
        'convection_roast',
        'keep_warm',
        'air_fry',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_cook_mode',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'bake',
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_current_temperature-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_current_temperature',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
    'original_icon': None,
    'original_name': 'Upper oven current temperature',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_current_temperature_upper',
    'unique_id': 'said_oven_dual-oven_current_temperature_upper',
    'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_current_temperature-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'temperature',
      'friendly_name': 'Dual cavity oven Upper oven current temperature',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_current_temperature',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '180',
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_state-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'standby',
        'preheating',
        'cooking',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_state',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Upper oven state',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_state_upper',
    'unique_id': 'said_oven_dual-oven_state_upper',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_state-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'enum',
      'friendly_name': 'Dual cavity oven Upper oven state',
      'options': list([
        'standby',
        'preheating',
        'cooking',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_state',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'standby',
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_target_temperature-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_target_temperature',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
    'original_icon': None,
    'original_name': 'Upper oven target temperature',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_target_temperature_upper',
    'unique_id': 'said_oven_dual-oven_target_temperature_upper',
    'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
  })
# ---
# name: test_all_entities[sensor.dual_cavity_oven_upper_oven_target_temperature-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'temperature',
      'friendly_name': 'Dual cavity oven Upper oven target temperature',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.dual_cavity_oven_upper_oven_target_temperature',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '200',
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_cook_mode-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'standby',
        'bake',
        'convection_bake',
        'broil',
        'convection_broil',
        'convection_roast',
        'keep_warm',
        'air_fry',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.single_cavity_oven_cook_mode',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Cook mode',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_cook_mode',
    'unique_id': 'said_oven_single-oven_cook_mode',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_cook_mode-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'enum',
      'friendly_name': 'Single cavity oven Cook mode',
      'options': list([
        'standby',
        'bake',
        'convection_bake',
        'broil',
        'convection_broil',
        'convection_roast',
        'keep_warm',
        'air_fry',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.single_cavity_oven_cook_mode',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'bake',
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_current_temperature-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.single_cavity_oven_current_temperature',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
    'original_icon': None,
    'original_name': 'Current temperature',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_current_temperature',
    'unique_id': 'said_oven_single-oven_current_temperature',
    'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_current_temperature-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'temperature',
      'friendly_name': 'Single cavity oven Current temperature',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.single_cavity_oven_current_temperature',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '180',
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_state-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'standby',
        'preheating',
        'cooking',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.single_cavity_oven_state',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'State',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_state',
    'unique_id': 'said_oven_single-oven_state',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_state-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'enum',
      'friendly_name': 'Single cavity oven State',
      'options': list([
        'standby',
        'preheating',
        'cooking',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.single_cavity_oven_state',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'standby',
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_target_temperature-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.single_cavity_oven_target_temperature',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
    'original_icon': None,
    'original_name': 'Target temperature',
    'platform': 'whirlpool',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'oven_target_temperature',
    'unique_id': 'said_oven_single-oven_target_temperature',
    'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
  })
# ---
# name: test_all_entities[sensor.single_cavity_oven_target_temperature-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'temperature',
      'friendly_name': 'Single cavity oven Target temperature',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.single_cavity_oven_target_temperature',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '200',
  })
# ---
# name: test_all_entities[sensor.washer_detergent_level-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
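
Editor's note: the snapshot entries above (truncated here in the source) are produced by a syrupy-backed test that enables all registry entities and compares each entity's registry entry and state against the stored snapshot. A minimal sketch of that pattern follows; `init_integration` is the setup helper used elsewhere in these tests, and the assumption that it returns the config entry is for illustration only.

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import snapshot_platform


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Sketch: snapshot every entity the integration creates."""
    config_entry = await init_integration(hass)  # assumed to return the config entry
    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
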
@@ -210,7 +210,6 @@ async def test_no_appliances_flow(
    mock_appliances_manager_api.return_value.aircons = []
    mock_appliances_manager_api.return_value.washers = []
    mock_appliances_manager_api.return_value.dryers = []
    mock_appliances_manager_api.return_value.ovens = []
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], CONFIG_INPUT | {CONF_REGION: region[0], CONF_BRAND: brand[0]}
    )

@@ -83,8 +83,6 @@ async def test_setup_no_appliances(
    mock_appliances_manager_api.return_value.aircons = []
    mock_appliances_manager_api.return_value.washers = []
    mock_appliances_manager_api.return_value.dryers = []
    mock_appliances_manager_api.return_value.ovens = []

    await init_integration(hass)
    assert len(hass.states.async_all()) == 0

@@ -6,7 +6,6 @@ from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from whirlpool.dryer import MachineState as DryerMachineState
from whirlpool.oven import CavityState as OvenCavityState, CookMode
from whirlpool.washer import MachineState as WasherMachineState

from homeassistant.components.whirlpool.sensor import SCAN_INTERVAL

@@ -313,60 +312,6 @@ async def test_washer_running_states(
                (5, "active"),
            ],
        ),
        (
            "sensor.dual_cavity_oven_upper_oven_state",
            "mock_oven_dual_cavity_api",
            "get_cavity_state",
            [
                (OvenCavityState.Standby, "standby"),
                (OvenCavityState.Preheating, "preheating"),
                (OvenCavityState.Cooking, "cooking"),
                (None, STATE_UNKNOWN),
            ],
        ),
        (
            "sensor.dual_cavity_oven_upper_oven_cook_mode",
            "mock_oven_dual_cavity_api",
            "get_cook_mode",
            [
                (CookMode.Standby, "standby"),
                (CookMode.Bake, "bake"),
                (CookMode.ConvectBake, "convection_bake"),
                (CookMode.Broil, "broil"),
                (CookMode.ConvectBroil, "convection_broil"),
                (CookMode.ConvectRoast, "convection_roast"),
                (CookMode.KeepWarm, "keep_warm"),
                (CookMode.AirFry, "air_fry"),
                (None, STATE_UNKNOWN),
            ],
        ),
        (
            "sensor.single_cavity_oven_state",
            "mock_oven_single_cavity_api",
            "get_cavity_state",
            [
                (OvenCavityState.Standby, "standby"),
                (OvenCavityState.Preheating, "preheating"),
                (OvenCavityState.Cooking, "cooking"),
                (None, STATE_UNKNOWN),
            ],
        ),
        (
            "sensor.single_cavity_oven_cook_mode",
            "mock_oven_single_cavity_api",
            "get_cook_mode",
            [
                (CookMode.Standby, "standby"),
                (CookMode.Bake, "bake"),
                (CookMode.ConvectBake, "convection_bake"),
                (CookMode.Broil, "broil"),
                (CookMode.ConvectBroil, "convection_broil"),
                (CookMode.ConvectRoast, "convection_roast"),
                (CookMode.KeepWarm, "keep_warm"),
                (CookMode.AirFry, "air_fry"),
                (None, STATE_UNKNOWN),
            ],
        ),
    ],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")

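Editor's note: the parametrize cases removed in the hunk above map whirlpool library enum values to expected sensor states. A rough sketch of how one such case is exercised is shown below; the fixture name is taken from the removed block, while `init_integration` and the freezer-based refresh are assumptions consistent with the surrounding imports, not the literal test body.

import pytest
from freezegun.api import FrozenDateTimeFactory
from whirlpool.oven import CavityState as OvenCavityState

from homeassistant.components.whirlpool.sensor import SCAN_INTERVAL
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


@pytest.mark.parametrize(
    ("value", "expected"),
    [
        (OvenCavityState.Standby, "standby"),
        (OvenCavityState.Preheating, "preheating"),
        (OvenCavityState.Cooking, "cooking"),
        (None, STATE_UNKNOWN),
    ],
)
async def test_oven_cavity_state_values(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    mock_oven_single_cavity_api,  # fixture name taken from the removed cases
    value,
    expected,
) -> None:
    """Sketch: map each CavityState value to the expected sensor state."""
    await init_integration(hass)  # assumed setup helper from this test module
    # Mocked getter; the real method may take a cavity argument.
    mock_oven_single_cavity_api.get_cavity_state.return_value = value
    freezer.tick(SCAN_INTERVAL)
    async_fire_time_changed(hass)
    await hass.async_block_till_done()
    assert hass.states.get("sensor.single_cavity_oven_state").state == expected
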
@@ -458,7 +458,6 @@ async def test_assist_api_prompt(
        connections={("test", "1234")},
        suggested_area="Test Area",
    )
    device_registry.async_update_device(device.id, name_by_user="Friendly Device")
    area = area_registry.async_get_area_by_name("Test Area")
    area_registry.async_update(area.id, aliases=["Alternative name"])
    entry1 = entity_registry.async_get_or_create(

@@ -576,9 +575,9 @@ async def test_assist_api_prompt(
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={("test", "9876-integer-values")},
        name="1",
        manufacturer="2",
        model="3",
        name=1,
        manufacturer=2,
        model=3,
        suggested_area="Test Area 2",
    )
    )

@@ -596,12 +595,10 @@ async def test_assist_api_prompt(
    - names: Living Room
      domain: light
      state: 'on'
      device: Friendly Device
      areas: Test Area, Alternative name
    - names: Test Device, my test light
      domain: light
      state: unavailable
      device: Friendly Device
      areas: Test Area, Alternative name
    - names: Test Device 2
      domain: light

@@ -640,11 +637,9 @@ async def test_assist_api_prompt(
      domain: light
    - names: Living Room
      domain: light
      device: Friendly Device
      areas: Test Area, Alternative name
    - names: Test Device, my test light
      domain: light
      device: Friendly Device
      areas: Test Area, Alternative name
    - names: Test Device 2
      domain: light

@@ -5,7 +5,6 @@ from collections.abc import Callable, Iterable
from copy import deepcopy
import dataclasses
import io
import threading
from typing import Any
from unittest.mock import AsyncMock, Mock, patch

@@ -47,13 +46,14 @@ from homeassistant.helpers import (
    entity_registry as er,
    service,
)
from homeassistant.helpers.translation import async_get_translations
from homeassistant.loader import (
    Integration,
    async_get_integration,
    async_get_integrations,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.yaml.loader import JSON_TYPE, parse_yaml
from homeassistant.util.yaml.loader import parse_yaml

from tests.common import (
    MockEntity,

@@ -849,7 +849,7 @@ async def test_async_get_all_descriptions(hass: HomeAssistant) -> None:

    assert len(descriptions) == 1
    assert DOMAIN_GROUP in descriptions
    assert "description" not in descriptions[DOMAIN_GROUP]["reload"]
    assert "description" in descriptions[DOMAIN_GROUP]["reload"]
    assert "fields" in descriptions[DOMAIN_GROUP]["reload"]

    # Does not have services

@@ -857,39 +857,26 @@ async def test_async_get_all_descriptions(hass: HomeAssistant) -> None:

    logger_config = {DOMAIN_LOGGER: {}}

    # Test legacy service with translations in services.yaml
    def _load_services_file(integration: Integration) -> JSON_TYPE:
    async def async_get_translations(
        hass: HomeAssistant,
        language: str,
        category: str,
        integrations: Iterable[str] | None = None,
        config_flow: bool | None = None,
    ) -> dict[str, Any]:
        """Return all backend translations."""
        translation_key_prefix = f"component.{DOMAIN_LOGGER}.services.set_default_level"
        return {
            "set_default_level": {
                "description": "Translated description",
                "fields": {
                    "level": {
                        "description": "Field description",
                        "example": "Field example",
                        "name": "Field name",
                        "selector": {
                            "select": {
                                "options": [
                                    "debug",
                                    "info",
                                    "warning",
                                    "error",
                                    "fatal",
                                    "critical",
                                ],
                                "translation_key": "level",
                            }
                        },
                    }
                },
                "name": "Translated name",
            },
            "set_level": None,
            f"{translation_key_prefix}.name": "Translated name",
            f"{translation_key_prefix}.description": "Translated description",
            f"{translation_key_prefix}.fields.level.name": "Field name",
            f"{translation_key_prefix}.fields.level.description": "Field description",
            f"{translation_key_prefix}.fields.level.example": "Field example",
        }

    with patch(
        "homeassistant.helpers.service._load_services_file",
        side_effect=_load_services_file,
        "homeassistant.helpers.service.translation.async_get_translations",
        side_effect=async_get_translations,
    ):
        await async_setup_component(hass, DOMAIN_LOGGER, logger_config)
        descriptions = await service.async_get_all_descriptions(hass)
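
Editor's note: the flat keys used in the patched translations above follow the `component.<domain>.services.<service>` convention shown in the hunk. A small, hedged sketch of how a test helper could build such keys is shown below; the helper name and shape are illustrative, not part of the Home Assistant API.

def service_translations(
    domain: str,
    srv: str,
    *,
    name: str,
    description: str,
    fields: dict[str, dict[str, str]] | None = None,
) -> dict[str, str]:
    """Build flat translation keys like the patched async_get_translations returns."""
    prefix = f"component.{domain}.services.{srv}"
    translations = {
        f"{prefix}.name": name,
        f"{prefix}.description": description,
    }
    for field, texts in (fields or {}).items():
        for key, value in texts.items():
            translations[f"{prefix}.fields.{field}.{key}"] = value
    return translations


# Reproduces the keys patched in the hunk above.
logger_translations = service_translations(
    "logger",
    "set_default_level",
    name="Translated name",
    description="Translated description",
    fields={
        "level": {
            "name": "Field name",
            "description": "Field description",
            "example": "Field example",
        }
    },
)
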
@@ -1016,11 +1003,18 @@ async def test_async_get_all_descriptions_dot_keys(hass: HomeAssistant) -> None:
    assert descriptions == {
        "test_domain": {
            "test_service": {
                "description": "",
                "fields": {
                    "test": {
                        "selector": {"text": {"multiline": False, "multiple": False}}
                        "selector": {
                            "text": {
                                "multiline": False,
                                "multiple": False,
                            }
                        }
                    }
                },
                "name": "",
            }
        }
    }

@@ -1102,6 +1096,7 @@ async def test_async_get_all_descriptions_filter(hass: HomeAssistant) -> None:
    )

    test_service_schema = {
        "description": "",
        "fields": {
            "advanced_stuff": {
                "fields": {

@@ -1160,6 +1155,7 @@ async def test_async_get_all_descriptions_filter(hass: HomeAssistant) -> None:
                },
            },
        },
        "name": "",
        "target": {
            "entity": [
                {

@@ -1195,11 +1191,31 @@ async def test_async_get_all_descriptions_failing_integration(
        integrations[DOMAIN_LOGGER] = ImportError("Failed to load services.yaml")
        return integrations

    async def wrap_get_translations(
        hass: HomeAssistant,
        language: str,
        category: str,
        integrations: Iterable[str] | None = None,
        config_flow: bool | None = None,
    ) -> dict[str, str]:
        translations = await async_get_translations(
            hass, language, category, integrations, config_flow
        )
        return {
            key: value
            for key, value in translations.items()
            if not key.startswith("component.logger.services.")
        }

    with (
        patch(
            "homeassistant.helpers.service.async_get_integrations",
            wraps=wrap_get_integrations,
        ),
        patch(
            "homeassistant.helpers.service.translation.async_get_translations",
            wrap_get_translations,
        ),
    ):
        descriptions = await service.async_get_all_descriptions(hass)

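Editor's note: `wrap_get_translations` in the hunk above delegates to the real `async_get_translations` and then drops the `component.logger.services.` keys so the logger services appear untranslated. The same delegate-and-filter pattern can be written generically; a hedged sketch follows, with the helper and parameter names invented for illustration.

from collections.abc import Awaitable, Callable
from functools import wraps
from typing import Any


def filter_translation_keys(
    func: Callable[..., Awaitable[dict[str, Any]]],
    *,
    drop_prefix: str,
) -> Callable[..., Awaitable[dict[str, Any]]]:
    """Wrap a translations coroutine and drop keys that start with drop_prefix."""

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> dict[str, Any]:
        translations = await func(*args, **kwargs)
        return {k: v for k, v in translations.items() if not k.startswith(drop_prefix)}

    return wrapper


# Usage mirroring the test above (prefix taken from the diff):
# patched = filter_translation_keys(async_get_translations, drop_prefix="component.logger.services.")
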
@@ -1208,12 +1224,16 @@ async def test_async_get_all_descriptions_failing_integration(

    # Services are empty defaults if the load fails but should
    # not raise
    assert "description" not in descriptions[DOMAIN_GROUP]["remove"]
    assert descriptions[DOMAIN_GROUP]["remove"]["description"]
    assert descriptions[DOMAIN_GROUP]["remove"]["fields"]

    assert descriptions[DOMAIN_LOGGER]["set_level"] == {"fields": {}}
    assert descriptions[DOMAIN_LOGGER]["set_level"] == {
        "description": "",
        "fields": {},
        "name": "",
    }

    assert "description" not in descriptions[DOMAIN_INPUT_BUTTON]["press"]
    assert descriptions[DOMAIN_INPUT_BUTTON]["press"]["description"]
    assert descriptions[DOMAIN_INPUT_BUTTON]["press"]["fields"] == {}
    assert "target" in descriptions[DOMAIN_INPUT_BUTTON]["press"]

@@ -1268,7 +1288,7 @@ async def test_async_get_all_descriptions_dynamically_created_services(

    assert len(descriptions) == 1

    assert "description" not in descriptions["group"]["reload"]
    assert "description" in descriptions["group"]["reload"]
    assert "fields" in descriptions["group"]["reload"]

    shell_command_config = {DOMAIN_SHELL_COMMAND: {"test_service": "ls /bin"}}

@@ -1277,7 +1297,9 @@ async def test_async_get_all_descriptions_dynamically_created_services(

    assert len(descriptions) == 2
    assert descriptions[DOMAIN_SHELL_COMMAND]["test_service"] == {
        "description": "",
        "fields": {},
        "name": "",
        "response": {"optional": True},
    }

@@ -1292,53 +1314,41 @@ async def test_async_get_all_descriptions_new_service_added_while_loading(

    assert len(descriptions) == 1

    assert "description" not in descriptions["group"]["reload"]
    assert "description" in descriptions["group"]["reload"]
    assert "fields" in descriptions["group"]["reload"]

    logger_domain = DOMAIN_LOGGER
    logger_config = {logger_domain: {}}

    translations_called = threading.Event()
    translations_wait = threading.Event()
    translations_called = asyncio.Event()
    translations_wait = asyncio.Event()

    def _load_services_file(integration: Integration) -> JSON_TYPE:
    async def async_get_translations(
        hass: HomeAssistant,
        language: str,
        category: str,
        integrations: Iterable[str] | None = None,
        config_flow: bool | None = None,
    ) -> dict[str, Any]:
        """Return all backend translations."""
        translations_called.set()
        translations_wait.wait()
        await translations_wait.wait()
        translation_key_prefix = f"component.{logger_domain}.services.set_default_level"
        return {
            "set_default_level": {
                "description": "Translated description",
                "fields": {
                    "level": {
                        "description": "Field description",
                        "example": "Field example",
                        "name": "Field name",
                        "selector": {
                            "select": {
                                "options": [
                                    "debug",
                                    "info",
                                    "warning",
                                    "error",
                                    "fatal",
                                    "critical",
                                ],
                                "translation_key": "level",
                            }
                        },
                    }
                },
                "name": "Translated name",
            },
            "set_level": None,
            f"{translation_key_prefix}.name": "Translated name",
            f"{translation_key_prefix}.description": "Translated description",
            f"{translation_key_prefix}.fields.level.name": "Field name",
            f"{translation_key_prefix}.fields.level.description": "Field description",
            f"{translation_key_prefix}.fields.level.example": "Field example",
        }

    with patch(
        "homeassistant.helpers.service._load_services_file",
        side_effect=_load_services_file,
        "homeassistant.helpers.service.translation.async_get_translations",
        side_effect=async_get_translations,
    ):
        await async_setup_component(hass, logger_domain, logger_config)
        task = asyncio.create_task(service.async_get_all_descriptions(hass))
        await hass.async_add_executor_job(translations_called.wait)
        await translations_called.wait()
        # Now register a new service while translations are being loaded
        hass.services.async_register(logger_domain, "new_service", lambda x: None, None)
        service.async_set_service_schema(

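Editor's note: the hunk above swaps `threading.Event` for `asyncio.Event` because the patched translations loader is now a coroutine running on the event loop, so the test must `await` the events instead of blocking a thread. A self-contained sketch of that coordination pattern is shown below; the names are illustrative and unrelated to the Home Assistant code.

import asyncio


async def demo_event_handoff() -> None:
    """Sketch: pause a coroutine mid-way and resume it from the caller."""
    called = asyncio.Event()
    release = asyncio.Event()

    async def slow_loader() -> str:
        called.set()          # signal that loading has started
        await release.wait()  # park without blocking the event loop
        return "loaded"

    task = asyncio.create_task(slow_loader())
    await called.wait()       # caller now knows the loader is parked
    # ...mutate state the loader should observe, e.g. register a new service...
    release.set()
    assert await task == "loaded"


asyncio.run(demo_event_handoff())
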
@@ -1,31 +0,0 @@
blueprint:
  name: Switch to light
  domain: template
  input:
    switch:
      name: Switch
      description: The switch which should be converted
      selector:
        entity:
          multiple: false
          filter:
            - domain: switch
      default: null

variables:
  switch: !input switch
  on_icon: mdi:lightbulb
  on_picture: "on.png"
  extraa: "a"

sensor:
  variables:
    off_icon: mdi:lightbulb-off
    off_picture: "off.png"
    extrab: "b"
  name: "{{ state_attr(switch, 'friendly_name') }}"
  icon: "{{ on_icon if is_state(switch, 'on') else off_icon }}"
  picture: "{{ on_picture if is_state(switch, 'on') else off_picture }}"
  state: "{{ is_state(switch, 'on') }}"
  attributes:
    extra: "{{ extraa ~ extrab }}"