Compare commits


31 Commits

Author  SHA1  Message  Date
farmio  1b7c9afb2c  validate device_class, state_class and unit  2025-12-10 23:08:09 +01:00
farmio  6974a70607  sort in BE for test consistency  2025-12-10 21:11:57 +01:00
farmio  9334d4b108  unit_o_m, device_class translations  2025-12-10 21:02:24 +01:00
Matthias Alphart  57ae4c8656  Update strings.json  2025-12-10 15:55:38 +01:00
    Co-authored-by: Norbert Rittel <norbert@rittel.de>
farmio  76b9a99bdd  add tests  2025-12-10 11:19:09 +01:00
farmio  47e9c5785f  Support KNX sensor entity configuration from UI  2025-12-09 22:57:20 +01:00
Kevin Stillhammer  38c5e483a8  add entity_picture to fressnapf_tracker (#158099)  2025-12-06 13:28:53 +01:00
Michael  ce14544ec1  Add packet loss sensor to Ping integration (#158081)  2025-12-06 10:57:31 +01:00
mettolen  87b9c3193e  Add sensor entities to Airobot integration (#157938)  2025-12-06 07:57:03 +01:00
Adam Goode  061c38d2a7  Make unifi LEDs EntityCategory.CONFIG (#158088)  2025-12-06 07:51:09 +01:00
Allen Porter  e1720be5a4  Update roborock quality scale (#158024)  2025-12-05 22:52:38 +01:00
Paul Bottein  2d13a92496  Update frontend to 20251203.1 (#158069)  2025-12-05 21:25:01 +01:00
Artur Pragacz  b06bffa815  Add ai_task to core files (#158058)  2025-12-05 21:14:49 +01:00
Joost Lekkerkerker  b8f4b9515b  Prevent entsoe from loading (#158036)  2025-12-05 21:08:57 +01:00
    Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
    Co-authored-by: Franck Nijhof <git@frenck.dev>
Petro31  3c10e9f1c0  Fix inverted kelvin issue (#158054)  2025-12-05 19:12:12 +00:00
Artur Pragacz  2dec3befcd  Assign hass in Condition init (#158062)  2025-12-05 19:04:11 +00:00
J. Nick Koston  7d065bf314  Bump aiodns to 3.6.0 (#158063)  2025-12-05 20:00:09 +01:00
Raphael Hehl  3315680d0b  Bump uiprotect to 7.33.2 (#158057)  2025-12-05 19:43:44 +01:00
Markus Jacobsen  ce48c89a26  Fix button event entity creation in Bang & Olufsen (#157982)  2025-12-05 18:39:58 +00:00
    Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
Jan Bouwhuis  f67a926f56  Move lametric URLs out of strings.json (#158051)  2025-12-05 19:35:07 +01:00
Paulus Schoutsen  e0a9d305b2  Use multiple selector for validation in AI task (#158056)  2025-12-05 18:51:18 +01:00
Jan Bouwhuis  4ff141d35e  Move example image path out of translatable strings (#158053)  2025-12-05 18:05:09 +01:00
Artur Pragacz  f12a43b2b7  Mark reauthentication in music assistant quality scale (#158055)  2025-12-05 18:02:16 +01:00
Paul Tarjan  35e6f504a3  Fix doorbird duplicate unique ID generation (#158013)  2025-12-05 10:17:43 -06:00
    Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Denis Shulyaka  1f68809cf9  Replace deprecated preview image model (#158048)  2025-12-05 07:55:05 -08:00
Paul Bottein  66bddebca1  Add subscribe preview feature endpoint to labs (#157976)  2025-12-05 16:36:56 +01:00
TheJulianJES  2280d779a8  Change ZHA strings for incorrect adapter state (#158021)  2025-12-05 16:35:34 +01:00
    Co-authored-by: Norbert Rittel <norbert@rittel.de>
Maciej Bieniek  ebc608845c  Do not create restart button for sleeping gen2+ Shelly devices (#158047)  2025-12-05 16:33:11 +01:00
Max Michels  5d13a41926  Move telegram-bot URLs out of strings.json (#155130)  2025-12-05 16:33:01 +01:00
    Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>
    Co-authored-by: jbouwh <jan@jbsoft.nl>
Quentin Ulmer  630b40fbba  Fix Rituals Perfume Genie (#151537)  2025-12-05 16:16:51 +01:00
    Co-authored-by: Joostlek <joostlek@outlook.com>
    Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Manu  7fd440c4a0  Add coordinator to Duck DNS integration (#158041)  2025-12-05 15:49:48 +01:00

106 changed files with 3139 additions and 1019 deletions

View File

@@ -13,6 +13,7 @@ core: &core
# Our base platforms, that are used by other integrations
base_platforms: &base_platforms
- homeassistant/components/ai_task/**
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**

CODEOWNERS generated
View File

@@ -1358,8 +1358,8 @@ build.json @home-assistant/supervisor
/tests/components/ring/ @sdb9696
/homeassistant/components/risco/ @OnFreund
/tests/components/risco/ @OnFreund
-/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
-/tests/components/rituals_perfume_genie/ @milanmeu @frenck
+/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
+/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L @allenporter

View File

@@ -101,8 +101,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields,
),
-vol.Optional(ATTR_ATTACHMENTS): vol.All(
-cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
+vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
+{"accept": ["*/*"], "multiple": True}
),
}
),
@@ -118,8 +118,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Required(ATTR_TASK_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_INSTRUCTIONS): cv.string,
-vol.Optional(ATTR_ATTACHMENTS): vol.All(
-cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
+vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
+{"accept": ["*/*"], "multiple": True}
),
}
),
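The change above collapses the list-of-selectors validator into a single MediaSelector configured for multiple items. A minimal sketch of the new validator in isolation (the payload comment is illustrative, not taken from the diff):

from homeassistant.helpers import selector

# One selector now validates the whole attachments list, same config as in the diff above.
attachments_validator = selector.MediaSelector({"accept": ["*/*"], "multiple": True})
# A generate_data call can then pass several attachments in a single list, e.g.
# {"task_name": "...", "instructions": "...", "attachments": [<media item>, <media item>]}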

View File

@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
-PLATFORMS: list[Platform] = [Platform.CLIMATE]
+PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:

View File

@@ -44,7 +44,7 @@ rules:
discovery: done
docs-data-update: done
docs-examples: todo
-docs-known-limitations: todo
+docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
@@ -54,7 +54,7 @@ rules:
comment: Single device integration, no dynamic device discovery needed.
entity-category: done
entity-device-class: done
-entity-disabled-by-default: todo
+entity-disabled-by-default: done
entity-translations: todo
exception-translations: done
icon-translations: todo

View File

@@ -0,0 +1,134 @@
"""Sensor platform for Airobot thermostat."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from pyairobotrest.models import ThermostatStatus
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
EntityCategory,
UnitOfTemperature,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from . import AirobotConfigEntry
from .entity import AirobotEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirobotSensorEntityDescription(SensorEntityDescription):
"""Describes Airobot sensor entity."""
value_fn: Callable[[ThermostatStatus], StateType]
supported_fn: Callable[[ThermostatStatus], bool] = lambda _: True
SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
AirobotSensorEntityDescription(
key="air_temperature",
translation_key="air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.temp_air,
),
AirobotSensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.hum_air,
),
AirobotSensorEntityDescription(
key="floor_temperature",
translation_key="floor_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.temp_floor,
supported_fn=lambda status: status.has_floor_sensor,
),
AirobotSensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.co2,
supported_fn=lambda status: status.has_co2_sensor,
),
AirobotSensorEntityDescription(
key="air_quality_index",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.aqi,
supported_fn=lambda status: status.has_co2_sensor,
),
AirobotSensorEntityDescription(
key="heating_uptime",
translation_key="heating_uptime",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.HOURS,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: status.heating_uptime,
entity_registry_enabled_default=False,
),
AirobotSensorEntityDescription(
key="errors",
translation_key="errors",
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: status.errors,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirobotConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Airobot sensor platform."""
coordinator = entry.runtime_data
async_add_entities(
AirobotSensor(coordinator, description)
for description in SENSOR_TYPES
if description.supported_fn(coordinator.data.status)
)
class AirobotSensor(AirobotEntity, SensorEntity):
"""Representation of an Airobot sensor."""
entity_description: AirobotSensorEntityDescription
def __init__(
self,
coordinator,
description: AirobotSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data.status)
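The supported_fn field above lets setup skip sensors the thermostat cannot provide. A hedged sketch of that filtering step (FakeStatus is a made-up stand-in for pyairobotrest's ThermostatStatus, reduced to the capability flags consulted above):

from dataclasses import dataclass

@dataclass
class FakeStatus:
    # Illustrative stand-in; only the flags read by supported_fn.
    has_floor_sensor: bool = False
    has_co2_sensor: bool = True

created_keys = [d.key for d in SENSOR_TYPES if d.supported_fn(FakeStatus())]
# floor_temperature is dropped (no floor sensor); co2 and air_quality_index stay.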

View File

@@ -43,6 +43,25 @@
}
}
},
"entity": {
"sensor": {
"air_temperature": {
"name": "Air temperature"
},
"device_uptime": {
"name": "Device uptime"
},
"errors": {
"name": "Error count"
},
"floor_temperature": {
"name": "Floor temperature"
},
"heating_uptime": {
"name": "Heating uptime"
}
}
},
"exceptions": {
"authentication_failed": {
"message": "Authentication failed, please reauthenticate."

View File

@@ -42,14 +42,25 @@ async def get_remotes(client: MozartClient) -> list[PairedRemote]:
def get_device_buttons(model: BeoModel) -> list[str]:
"""Get supported buttons for a given model."""
-# Beoconnect Core does not have any buttons
-if model == BeoModel.BEOCONNECT_CORE:
-return []
buttons = DEVICE_BUTTONS.copy()
-# Beosound Premiere does not have a bluetooth button
-if model == BeoModel.BEOSOUND_PREMIERE:
+# Models that don't have a microphone button
+if model in (
+BeoModel.BEOSOUND_A5,
+BeoModel.BEOSOUND_A9,
+BeoModel.BEOSOUND_PREMIERE,
+):
+buttons.remove(BeoButtons.MICROPHONE)
+# Models that don't have a Bluetooth button
+if model in (
+BeoModel.BEOSOUND_A9,
+BeoModel.BEOSOUND_PREMIERE,
+):
buttons.remove(BeoButtons.BLUETOOTH)
+# Beoconnect Core does not have any buttons
+elif model == BeoModel.BEOCONNECT_CORE:
+buttons = []
return buttons

View File

@@ -56,7 +56,6 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
-_hass: HomeAssistant
_config: ConfigType
@classmethod
@@ -87,7 +86,7 @@ class DeviceCondition(Condition):
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
-self._hass = hass
+super().__init__(hass, config)
assert config.options is not None
self._config = config.options

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.5.0"]
"requirements": ["aiodns==3.6.0"]
}

View File

@@ -102,6 +102,12 @@ class ConfiguredDoorBird:
"""Get token for device."""
return self._token
def _get_hass_url(self) -> str:
"""Get the Home Assistant URL for this device."""
if custom_url := self.custom_url:
return custom_url
return get_url(self._hass, prefer_external=False)
async def async_register_events(self) -> None:
"""Register events on device."""
if not self.door_station_events:
@@ -146,13 +152,7 @@ class ConfiguredDoorBird:
async def _async_register_events(self) -> dict[str, Any]:
"""Register events on device."""
-# Override url if another is specified in the configuration
-if custom_url := self.custom_url:
-hass_url = custom_url
-else:
-# Get the URL of this server
-hass_url = get_url(self._hass, prefer_external=False)
+hass_url = self._get_hass_url()
http_fav = await self._async_get_http_favorites()
if any(
# Note that a list comp is used here to ensure all
@@ -191,10 +191,14 @@ class ConfiguredDoorBird:
self._get_event_name(event): event_type
for event, event_type in DEFAULT_EVENT_TYPES
}
hass_url = self._get_hass_url()
for identifier, data in http_fav.items():
title: str | None = data.get("title")
if not title or not title.startswith("Home Assistant"):
continue
value: str | None = data.get("value")
if not value or not value.startswith(hass_url):
continue # Not our favorite - different HA instance or stale
event = title.partition("(")[2].strip(")")
if input_type := favorite_input_type.get(identifier):
events.append(DoorbirdEvent(event, input_type))

View File

@@ -2,33 +2,22 @@
from __future__ import annotations
from collections.abc import Callable, Coroutine, Sequence
from datetime import datetime, timedelta
import logging
from typing import Any, cast
from aiohttp import ClientSession
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HomeAssistant,
ServiceCall,
callback,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.selector import ConfigEntrySelector
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from .const import ATTR_CONFIG_ENTRY
from .coordinator import DuckDnsConfigEntry, DuckDnsUpdateCoordinator
from .helpers import update_duckdns
_LOGGER = logging.getLogger(__name__)
@@ -36,17 +25,8 @@ ATTR_TXT = "txt"
DOMAIN = "duckdns"
-INTERVAL = timedelta(minutes=5)
-BACKOFF_INTERVALS = (
-INTERVAL,
-timedelta(minutes=1),
-timedelta(minutes=5),
-timedelta(minutes=15),
-timedelta(minutes=30),
-)
SERVICE_SET_TXT = "set_txt"
-UPDATE_URL = "https://www.duckdns.org/update"
CONFIG_SCHEMA = vol.Schema(
{
@@ -71,8 +51,6 @@ SERVICE_TXT_SCHEMA = vol.Schema(
}
)
-type DuckDnsConfigEntry = ConfigEntry
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the DuckDNS component."""
@@ -99,21 +77,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
"""Set up Duck DNS from a config entry."""
-session = async_get_clientsession(hass)
+coordinator = DuckDnsUpdateCoordinator(hass, entry)
+await coordinator.async_config_entry_first_refresh()
+entry.runtime_data = coordinator
-async def update_domain_interval(_now: datetime) -> bool:
-"""Update the DuckDNS entry."""
-return await _update_duckdns(
-session,
-entry.data[CONF_DOMAIN],
-entry.data[CONF_ACCESS_TOKEN],
-)
-entry.async_on_unload(
-async_track_time_interval_backoff(
-hass, update_domain_interval, BACKOFF_INTERVALS
-)
-)
+# Add a dummy listener as we do not have regular entities
+entry.async_on_unload(coordinator.async_add_listener(lambda: None))
return True
@@ -153,7 +122,7 @@ async def update_domain_service(call: ServiceCall) -> None:
session = async_get_clientsession(call.hass)
-await _update_duckdns(
+await update_duckdns(
session,
entry.data[CONF_DOMAIN],
entry.data[CONF_ACCESS_TOKEN],
@@ -164,73 +133,3 @@ async def update_domain_service(call: ServiceCall) -> None:
async def async_unload_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
"""Unload a config entry."""
return True
_SENTINEL = object()
async def _update_duckdns(
session: ClientSession,
domain: str,
token: str,
*,
txt: str | None | object = _SENTINEL,
clear: bool = False,
) -> bool:
"""Update DuckDNS."""
params = {"domains": domain, "token": token}
if txt is not _SENTINEL:
if txt is None:
# Pass in empty txt value to indicate it's clearing txt record
params["txt"] = ""
clear = True
else:
params["txt"] = cast(str, txt)
if clear:
params["clear"] = "true"
resp = await session.get(UPDATE_URL, params=params)
body = await resp.text()
if body != "OK":
_LOGGER.warning("Updating DuckDNS domain failed: %s", domain)
return False
return True
@callback
@bind_hass
def async_track_time_interval_backoff(
hass: HomeAssistant,
action: Callable[[datetime], Coroutine[Any, Any, bool]],
intervals: Sequence[timedelta],
) -> CALLBACK_TYPE:
"""Add a listener that fires repetitively at every timedelta interval."""
remove: CALLBACK_TYPE | None = None
failed = 0
async def interval_listener(now: datetime) -> None:
"""Handle elapsed intervals with backoff."""
nonlocal failed, remove
try:
failed += 1
if await action(now):
failed = 0
finally:
delay = intervals[failed] if failed < len(intervals) else intervals[-1]
remove = async_call_later(
hass, delay.total_seconds(), interval_listener_job
)
interval_listener_job = HassJob(interval_listener, cancel_on_shutdown=True)
hass.async_run_hass_job(interval_listener_job, dt_util.utcnow())
def remove_listener() -> None:
"""Remove interval listener."""
if remove:
remove()
return remove_listener

View File

@@ -16,8 +16,8 @@ from homeassistant.helpers.selector import (
TextSelectorType,
)
-from . import _update_duckdns
from .const import DOMAIN
+from .helpers import update_duckdns
from .issue import deprecate_yaml_issue
_LOGGER = logging.getLogger(__name__)
@@ -46,7 +46,7 @@ class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
self._async_abort_entries_match({CONF_DOMAIN: user_input[CONF_DOMAIN]})
session = async_get_clientsession(self.hass)
try:
-if not await _update_duckdns(
+if not await update_duckdns(
session,
user_input[CONF_DOMAIN],
user_input[CONF_ACCESS_TOKEN],
@@ -93,7 +93,7 @@ class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
session = async_get_clientsession(self.hass)
try:
-if not await _update_duckdns(
+if not await update_duckdns(
session,
entry.data[CONF_DOMAIN],
user_input[CONF_ACCESS_TOKEN],

View File

@@ -0,0 +1,83 @@
"""Coordinator for the Duck DNS integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from aiohttp import ClientError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
from .helpers import update_duckdns
_LOGGER = logging.getLogger(__name__)
type DuckDnsConfigEntry = ConfigEntry[DuckDnsUpdateCoordinator]
INTERVAL = timedelta(minutes=5)
BACKOFF_INTERVALS = (
INTERVAL,
timedelta(minutes=1),
timedelta(minutes=5),
timedelta(minutes=15),
timedelta(minutes=30),
)
class DuckDnsUpdateCoordinator(DataUpdateCoordinator[None]):
"""Duck DNS update coordinator."""
config_entry: DuckDnsConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: DuckDnsConfigEntry) -> None:
"""Initialize the Duck DNS update coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=INTERVAL,
)
self.session = async_get_clientsession(hass)
self.failed = 0
async def _async_update_data(self) -> None:
"""Update Duck DNS."""
retry_after = BACKOFF_INTERVALS[
min(self.failed, len(BACKOFF_INTERVALS) - 1)
].total_seconds()
try:
if not await update_duckdns(
self.session,
self.config_entry.data[CONF_DOMAIN],
self.config_entry.data[CONF_ACCESS_TOKEN],
):
self.failed += 1
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={
CONF_DOMAIN: self.config_entry.data[CONF_DOMAIN],
},
retry_after=retry_after,
)
except ClientError as e:
self.failed += 1
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="connection_error",
translation_placeholders={
CONF_DOMAIN: self.config_entry.data[CONF_DOMAIN],
},
retry_after=retry_after,
) from e
self.failed = 0
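With the BACKOFF_INTERVALS above, retry_after grows with consecutive failures and then stays at the last interval. A small sketch of the arithmetic, assuming the index is clamped to the final entry:

from datetime import timedelta

backoff = (
    timedelta(minutes=5),  # INTERVAL
    timedelta(minutes=1),
    timedelta(minutes=5),
    timedelta(minutes=15),
    timedelta(minutes=30),
)
for failed in range(6):
    retry_after = backoff[min(failed, len(backoff) - 1)].total_seconds()
    # failed: 0 -> 300s, 1 -> 60s, 2 -> 300s, 3 -> 900s, 4 and above -> 1800s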

View File

@@ -0,0 +1,35 @@
"""Helpers for Duck DNS integration."""
from aiohttp import ClientSession
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
UPDATE_URL = "https://www.duckdns.org/update"
async def update_duckdns(
session: ClientSession,
domain: str,
token: str,
*,
txt: str | None | UndefinedType = UNDEFINED,
clear: bool = False,
) -> bool:
"""Update DuckDNS."""
params = {"domains": domain, "token": token}
if txt is not UNDEFINED:
if txt is None:
# Pass in empty txt value to indicate it's clearing txt record
params["txt"] = ""
clear = True
else:
params["txt"] = txt
if clear:
params["clear"] = "true"
resp = await session.get(UPDATE_URL, params=params)
body = await resp.text()
return body == "OK"
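A short usage sketch for the helper above, e.g. from a test or standalone script (domain and token are placeholders):

import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # Set a TXT record; passing txt=None instead clears it (the helper then sends clear=true).
        ok = await update_duckdns(session, "example", "placeholder-token", txt="challenge-value")
        print("updated" if ok else "update failed")

asyncio.run(main())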

View File

@@ -32,11 +32,17 @@
}
},
"exceptions": {
"connection_error": {
"message": "Updating Duck DNS domain {domain} failed due to a connection error"
},
"entry_not_found": {
"message": "Duck DNS integration entry not found"
},
"entry_not_selected": {
"message": "Duck DNS integration entry not selected"
},
"update_failed": {
"message": "Updating Duck DNS domain {domain} failed"
}
},
"issues": {

View File

@@ -42,6 +42,11 @@ class FressnapfTrackerDeviceTracker(FressnapfTrackerBaseEntity, TrackerEntity):
"""Return if entity is available."""
return super().available and self.coordinator.data.position is not None
@property
def entity_picture(self) -> str | None:
"""Return the entity picture url."""
return self.coordinator.data.icon
@property
def latitude(self) -> float | None:
"""Return latitude value of the device."""

View File

@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251203.0"]
"requirements": ["home-assistant-frontend==20251203.1"]
}

View File

@@ -149,6 +149,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
}
),
supports_response=SupportsResponse.ONLY,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)
return True

View File

@@ -23,7 +23,7 @@ CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
RECOMMENDED_STT_MODEL = RECOMMENDED_CHAT_MODEL
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image-preview"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"

View File

@@ -162,7 +162,7 @@
"fields": {
"filenames": {
"description": "Attachments to add to the prompt (images, PDFs, etc)",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Attachment filenames"
},
"prompt": {

View File

@@ -159,4 +159,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
_async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)

View File

@@ -92,7 +92,7 @@
},
"filename": {
"description": "Path to the image or video to upload.",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Filename"
}
},

View File

@@ -165,6 +165,7 @@ SUPPORTED_PLATFORMS_UI: Final = {
Platform.DATE,
Platform.DATETIME,
Platform.LIGHT,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
}

View File

@@ -0,0 +1,142 @@
"""KNX DPT serializer."""
from collections.abc import Mapping
from functools import cache
from typing import Literal, TypedDict
from xknx.dpt import DPTBase, DPTComplex, DPTEnum, DPTNumeric
from xknx.dpt.dpt_16 import DPTString
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
HaDptClass = Literal["numeric", "enum", "complex", "string"]
class DPTInfo(TypedDict):
"""DPT information."""
dpt_class: HaDptClass
main: int
sub: int | None
name: str | None
unit: str | None
sensor_device_class: SensorDeviceClass | None
sensor_state_class: SensorStateClass | None
@cache
def get_supported_dpts() -> Mapping[str, DPTInfo]:
"""Return a mapping of supported DPTs with HA specific attributes."""
dpts = {}
for dpt_class in DPTBase.dpt_class_tree():
dpt_number_str = dpt_class.dpt_number_str()
ha_dpt_class = _ha_dpt_class(dpt_class)
dpts[dpt_number_str] = DPTInfo(
dpt_class=ha_dpt_class,
main=dpt_class.dpt_main_number, # type: ignore[typeddict-item] # checked in xknx unit tests
sub=dpt_class.dpt_sub_number,
name=dpt_class.value_type,
unit=dpt_class.unit,
sensor_device_class=_sensor_device_classes.get(dpt_number_str),
sensor_state_class=_get_sensor_state_class(ha_dpt_class, dpt_number_str),
)
return dpts
def _ha_dpt_class(dpt_cls: type[DPTBase]) -> HaDptClass:
"""Return the DPT class identifier string."""
if issubclass(dpt_cls, DPTNumeric):
return "numeric"
if issubclass(dpt_cls, DPTEnum):
return "enum"
if issubclass(dpt_cls, DPTComplex):
return "complex"
if issubclass(dpt_cls, DPTString):
return "string"
raise ValueError("Unsupported DPT class")
_sensor_device_classes: Mapping[str, SensorDeviceClass] = {
"7.011": SensorDeviceClass.DISTANCE,
"7.012": SensorDeviceClass.CURRENT,
"7.013": SensorDeviceClass.ILLUMINANCE,
"8.012": SensorDeviceClass.DISTANCE,
"9.001": SensorDeviceClass.TEMPERATURE,
"9.002": SensorDeviceClass.TEMPERATURE_DELTA,
"9.004": SensorDeviceClass.ILLUMINANCE,
"9.005": SensorDeviceClass.WIND_SPEED,
"9.006": SensorDeviceClass.PRESSURE,
"9.007": SensorDeviceClass.HUMIDITY,
"9.020": SensorDeviceClass.VOLTAGE,
"9.021": SensorDeviceClass.CURRENT,
"9.024": SensorDeviceClass.POWER,
"9.025": SensorDeviceClass.VOLUME_FLOW_RATE,
"9.027": SensorDeviceClass.TEMPERATURE,
"9.028": SensorDeviceClass.WIND_SPEED,
"9.029": SensorDeviceClass.ABSOLUTE_HUMIDITY,
"12.1200": SensorDeviceClass.VOLUME,
"12.1201": SensorDeviceClass.VOLUME,
"13.002": SensorDeviceClass.VOLUME_FLOW_RATE,
"13.010": SensorDeviceClass.ENERGY,
"13.012": SensorDeviceClass.REACTIVE_ENERGY,
"13.013": SensorDeviceClass.ENERGY,
"13.015": SensorDeviceClass.REACTIVE_ENERGY,
"13.016": SensorDeviceClass.ENERGY,
"14.010": SensorDeviceClass.AREA,
"14.019": SensorDeviceClass.CURRENT,
"14.027": SensorDeviceClass.VOLTAGE,
"14.028": SensorDeviceClass.VOLTAGE,
"14.030": SensorDeviceClass.VOLTAGE,
"14.031": SensorDeviceClass.ENERGY,
"14.033": SensorDeviceClass.FREQUENCY,
"14.037": SensorDeviceClass.ENERGY_STORAGE,
"14.039": SensorDeviceClass.DISTANCE,
"14.051": SensorDeviceClass.WEIGHT,
"14.056": SensorDeviceClass.POWER,
"14.057": SensorDeviceClass.POWER_FACTOR,
"14.058": SensorDeviceClass.PRESSURE,
"14.065": SensorDeviceClass.SPEED,
"14.068": SensorDeviceClass.TEMPERATURE,
"14.069": SensorDeviceClass.TEMPERATURE,
"14.070": SensorDeviceClass.TEMPERATURE_DELTA,
"14.076": SensorDeviceClass.VOLUME,
"14.077": SensorDeviceClass.VOLUME_FLOW_RATE,
"14.080": SensorDeviceClass.APPARENT_POWER,
"29.010": SensorDeviceClass.ENERGY,
"29.012": SensorDeviceClass.REACTIVE_ENERGY,
}
_sensor_state_class_overrides: Mapping[str, SensorStateClass | None] = {
"5.003": SensorStateClass.MEASUREMENT_ANGLE, # DPTAngle
"5.006": None, # DPTTariff
"7.010": None, # DPTPropDataType
"8.011": SensorStateClass.MEASUREMENT_ANGLE, # DPTRotationAngle
"9.026": SensorStateClass.TOTAL_INCREASING, # DPTRainAmount
"12.1200": SensorStateClass.TOTAL, # DPTVolumeLiquidLitre
"12.1201": SensorStateClass.TOTAL, # DPTVolumeM3
"13.010": SensorStateClass.TOTAL, # DPTActiveEnergy
"13.011": SensorStateClass.TOTAL, # DPTApparantEnergy
"13.012": SensorStateClass.TOTAL, # DPTReactiveEnergy
"14.007": SensorStateClass.MEASUREMENT_ANGLE, # DPTAngleDeg
"14.037": SensorStateClass.TOTAL, # DPTHeatQuantity
"14.051": SensorStateClass.TOTAL, # DPTMass
"14.055": SensorStateClass.MEASUREMENT_ANGLE, # DPTPhaseAngleDeg
"14.031": SensorStateClass.TOTAL_INCREASING, # DPTEnergy
"17.001": None, # DPTSceneNumber
"29.010": SensorStateClass.TOTAL, # DPTActiveEnergy8Byte
"29.011": SensorStateClass.TOTAL, # DPTApparantEnergy8Byte
"29.012": SensorStateClass.TOTAL, # DPTReactiveEnergy8Byte
}
def _get_sensor_state_class(
ha_dpt_class: HaDptClass, dpt_number_str: str
) -> SensorStateClass | None:
"""Return the SensorStateClass for a given DPT."""
if ha_dpt_class != "numeric":
return None
return _sensor_state_class_overrides.get(
dpt_number_str,
SensorStateClass.MEASUREMENT,
)
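get_supported_dpts() is what the UI sensor configuration uses to derive defaults from a chosen DPT. A quick lookup sketch; the commented values follow from the tables above and from xknx's DPTTemperature definition:

info = get_supported_dpts()["9.001"]
# info["dpt_class"] == "numeric"
# info["unit"] == "°C" (reported by xknx for DPT 9.001)
# info["sensor_device_class"] is SensorDeviceClass.TEMPERATURE
# info["sensor_state_class"] is SensorStateClass.MEASUREMENT (9.001 has no override)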

View File

@@ -6,8 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
from typing import Any
from xknx import XKNX
from xknx.core.connection_state import XknxConnectionState, XknxConnectionType
from xknx.devices import Device as XknxDevice, Sensor as XknxSensor
@@ -25,20 +25,32 @@ from homeassistant.const import (
CONF_ENTITY_CATEGORY,
CONF_NAME,
CONF_TYPE,
CONF_UNIT_OF_MEASUREMENT,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
EntityCategory,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.typing import ConfigType, StateType
from homeassistant.util.enum import try_parse_enum
from .const import ATTR_SOURCE, KNX_MODULE_KEY
from .entity import KnxYamlEntity
from .const import ATTR_SOURCE, CONF_SYNC_STATE, DOMAIN, KNX_MODULE_KEY
from .dpt import get_supported_dpts
from .entity import (
KnxUiEntity,
KnxUiEntityPlatformController,
KnxYamlEntity,
_KnxEntityBase,
)
from .knx_module import KNXModule
from .schema import SensorSchema
from .storage.const import CONF_ALWAYS_CALLBACK, CONF_ENTITY, CONF_GA_SENSOR
from .storage.util import ConfigExtractor
SCAN_INTERVAL = timedelta(seconds=10)
@@ -116,58 +128,41 @@ async def async_setup_entry(
config_entry: config_entries.ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensor(s) for KNX platform."""
"""Set up entities for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.SENSOR,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiSensor,
),
)
entities: list[SensorEntity] = []
entities.extend(
KNXSystemSensor(knx_module, description)
for description in SYSTEM_ENTITY_DESCRIPTIONS
)
config: list[ConfigType] | None = knx_module.config_yaml.get(Platform.SENSOR)
if config:
if yaml_platform_config := knx_module.config_yaml.get(Platform.SENSOR):
entities.extend(
KNXSensor(knx_module, entity_config) for entity_config in config
KnxYamlSensor(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.SENSOR):
entities.extend(
KnxUiSensor(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
async_add_entities(entities)
def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor:
"""Return a KNX sensor to be used within XKNX."""
return XknxSensor(
xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
)
class KNXSensor(KnxYamlEntity, RestoreSensor):
class _KnxSensor(RestoreSensor, _KnxEntityBase):
"""Representation of a KNX sensor."""
_device: XknxSensor
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
super().__init__(
knx_module=knx_module,
device=_create_sensor(knx_module.xknx, config),
)
if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
else:
self._attr_device_class = try_parse_enum(
SensorDeviceClass, self._device.ha_device_class()
)
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}
async def async_added_to_hass(self) -> None:
"""Restore last state."""
if (
@@ -192,6 +187,89 @@ class KNXSensor(KnxYamlEntity, RestoreSensor):
super().after_update_callback(device)
class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
"""Representation of a KNX sensor configured from YAML."""
_device: XknxSensor
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
super().__init__(
knx_module=knx_module,
device=XknxSensor(
knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
),
)
if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
else:
self._attr_device_class = try_parse_enum(
SensorDeviceClass, self._device.ha_device_class()
)
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}
class KnxUiSensor(_KnxSensor, KnxUiEntity):
"""Representation of a KNX sensor configured from the UI."""
_device: XknxSensor
def __init__(
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
) -> None:
"""Initialize KNX sensor."""
super().__init__(
knx_module=knx_module,
unique_id=unique_id,
entity_config=config[CONF_ENTITY],
)
knx_conf = ConfigExtractor(config[DOMAIN])
dpt_string = knx_conf.get_dpt(CONF_GA_SENSOR)
assert dpt_string is not None # required for sensor
dpt_info = get_supported_dpts()[dpt_string]
self._device = XknxSensor(
knx_module.xknx,
name=config[CONF_ENTITY][CONF_NAME],
group_address_state=knx_conf.get_state_and_passive(CONF_GA_SENSOR),
sync_state=knx_conf.get(CONF_SYNC_STATE),
always_callback=True,
value_type=dpt_string,
)
if device_class_override := knx_conf.get(CONF_DEVICE_CLASS):
self._attr_device_class = try_parse_enum(
SensorDeviceClass, device_class_override
)
else:
self._attr_device_class = dpt_info["sensor_device_class"]
if state_class_override := knx_conf.get(CONF_STATE_CLASS):
self._attr_state_class = try_parse_enum(
SensorStateClass, state_class_override
)
else:
self._attr_state_class = dpt_info["sensor_state_class"]
self._attr_native_unit_of_measurement = (
knx_conf.get(CONF_UNIT_OF_MEASUREMENT) or dpt_info["unit"]
)
self._attr_force_update = knx_conf.get(CONF_ALWAYS_CALLBACK, default=False)
self._attr_extra_state_attributes = {}
class KNXSystemSensor(SensorEntity):
"""Representation of a KNX system sensor."""

View File

@@ -65,3 +65,6 @@ CONF_GA_WHITE_BRIGHTNESS: Final = "ga_white_brightness"
CONF_GA_WHITE_SWITCH: Final = "ga_white_switch"
CONF_GA_HUE: Final = "ga_hue"
CONF_GA_SATURATION: Final = "ga_saturation"
# Sensor
CONF_ALWAYS_CALLBACK: Final = "always_callback"

View File

@@ -5,11 +5,21 @@ from enum import StrEnum, unique
import voluptuous as vol
from homeassistant.components.climate import HVACMode
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
SensorDeviceClass,
SensorStateClass,
)
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ENTITY_CATEGORY,
CONF_ENTITY_ID,
CONF_NAME,
CONF_PLATFORM,
CONF_UNIT_OF_MEASUREMENT,
Platform,
)
from homeassistant.helpers import config_validation as cv, selector
@@ -30,12 +40,15 @@ from ..const import (
CoverConf,
FanZeroMode,
)
from ..dpt import get_supported_dpts
from .const import (
CONF_ALWAYS_CALLBACK,
CONF_COLOR,
CONF_COLOR_TEMP_MAX,
CONF_COLOR_TEMP_MIN,
CONF_DATA,
CONF_DEVICE_INFO,
CONF_DPT,
CONF_ENTITY,
CONF_GA_ACTIVE,
CONF_GA_ANGLE,
@@ -507,6 +520,114 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
},
)
def _validate_sensor_attributes(config: dict) -> dict:
"""Validate that state_class is compatible with device_class and unit_of_measurement."""
dpt = config[CONF_GA_SENSOR][CONF_DPT]
dpt_metadata = get_supported_dpts()[dpt]
state_class = config.get(
CONF_SENSOR_STATE_CLASS,
dpt_metadata["sensor_state_class"],
)
device_class = config.get(
CONF_DEVICE_CLASS,
dpt_metadata["sensor_device_class"],
)
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT,
dpt_metadata["unit"],
)
if (
state_class
and device_class
and (state_classes := DEVICE_CLASS_STATE_CLASSES.get(device_class)) is not None
and state_class not in state_classes
):
raise vol.Invalid(
f"State class '{state_class}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, state_classes), key=str.casefold))}",
path=[CONF_SENSOR_STATE_CLASS],
)
if (
device_class
and (d_c_units := DEVICE_CLASS_UNITS.get(device_class)) is not None
and unit_of_measurement not in d_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, d_c_units), key=str.casefold))}",
path=(
[CONF_DEVICE_CLASS]
if CONF_DEVICE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
if (
state_class
and (s_c_units := STATE_CLASS_UNITS.get(state_class)) is not None
and unit_of_measurement not in s_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for state class '{state_class}'. "
f"Valid options are: {', '.join(sorted(map(str, s_c_units), key=str.casefold))}",
path=(
[CONF_SENSOR_STATE_CLASS]
if CONF_SENSOR_STATE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
return config
SENSOR_KNX_SCHEMA = AllSerializeFirst(
vol.Schema(
{
vol.Required(CONF_GA_SENSOR): GASelector(
write=False, state_required=True, dpt=["numeric", "string"]
),
"section_advanced_options": KNXSectionFlat(collapsible=True),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): selector.SelectSelector(
selector.SelectSelectorConfig(
options=sorted(
{
str(unit)
for units in DEVICE_CLASS_UNITS.values()
for unit in units
if unit is not None
}
),
mode=selector.SelectSelectorMode.DROPDOWN,
translation_key="component.knx.selector.sensor_unit_of_measurement",
custom_value=True,
),
),
vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
selector.SelectSelectorConfig(
options=[
cls.value
for cls in SensorDeviceClass
if cls != SensorDeviceClass.ENUM
],
translation_key="component.knx.selector.sensor_device_class",
sort=True,
)
),
vol.Optional(CONF_SENSOR_STATE_CLASS): selector.SelectSelector(
selector.SelectSelectorConfig(
options=list(SensorStateClass),
translation_key="component.knx.selector.sensor_state_class",
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
vol.Optional(CONF_ALWAYS_CALLBACK): selector.BooleanSelector(),
vol.Required(CONF_SYNC_STATE, default=True): SyncStateSelector(
allow_false=True
),
},
),
_validate_sensor_attributes,
)
KNX_SCHEMA_FOR_PLATFORM = {
Platform.BINARY_SENSOR: BINARY_SENSOR_KNX_SCHEMA,
Platform.CLIMATE: CLIMATE_KNX_SCHEMA,
@@ -514,6 +635,7 @@ KNX_SCHEMA_FOR_PLATFORM = {
Platform.DATE: DATE_KNX_SCHEMA,
Platform.DATETIME: DATETIME_KNX_SCHEMA,
Platform.LIGHT: LIGHT_KNX_SCHEMA,
Platform.SENSOR: SENSOR_KNX_SCHEMA,
Platform.SWITCH: SWITCH_KNX_SCHEMA,
Platform.TIME: TIME_KNX_SCHEMA,
}
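_validate_sensor_attributes above rejects override combinations that core's sensor tables disallow. A hedged sketch of the failure path, assuming the storage constants resolve to the literal keys used below and that core maps the temperature device class to measurement-style state classes only:

import voluptuous as vol

config = {
    "ga_sensor": {"state": "1/2/3", "dpt": "9.001"},  # DPT default device class: temperature
    "state_class": "total_increasing",  # incompatible override
    "sync_state": True,
}
try:
    _validate_sensor_attributes(config)
except vol.Invalid as err:
    print(err)  # state class 'total_increasing' is not valid for device class 'temperature'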

View File

@@ -6,6 +6,7 @@ from typing import Any
import voluptuous as vol
from ..dpt import HaDptClass, get_supported_dpts
from ..validation import ga_validator, maybe_ga_validator, sync_state_validator
from .const import CONF_DPT, CONF_GA_PASSIVE, CONF_GA_STATE, CONF_GA_WRITE
from .util import dpt_string_to_dict
@@ -162,7 +163,7 @@ class GASelector(KNXSelectorBase):
passive: bool = True,
write_required: bool = False,
state_required: bool = False,
-dpt: type[Enum] | None = None,
+dpt: type[Enum] | list[HaDptClass] | None = None,
valid_dpt: str | Iterable[str] | None = None,
) -> None:
"""Initialize the group address selector."""
@@ -186,14 +187,17 @@ class GASelector(KNXSelectorBase):
"passive": self.passive,
}
if self.dpt is not None:
options["dptSelect"] = [
{
"value": item.value,
"translation_key": item.value.replace(".", "_"),
"dpt": dpt_string_to_dict(item.value), # used for filtering GAs
}
for item in self.dpt
]
if isinstance(self.dpt, list):
options["dptClasses"] = self.dpt
else:
options["dptSelect"] = [
{
"value": item.value,
"translation_key": item.value.replace(".", "_"),
"dpt": dpt_string_to_dict(item.value), # used for filtering GAs
}
for item in self.dpt
]
if self.valid_dpt is not None:
options["validDPTs"] = [dpt_string_to_dict(dpt) for dpt in self.valid_dpt]
@@ -254,7 +258,12 @@ class GASelector(KNXSelectorBase):
def _add_dpt(self, schema: dict[vol.Marker, Any]) -> None:
"""Add DPT validator to the schema."""
if self.dpt is not None:
-schema[vol.Required(CONF_DPT)] = vol.In({item.value for item in self.dpt})
+if isinstance(self.dpt, list):
+schema[vol.Required(CONF_DPT)] = vol.In(get_supported_dpts())
+else:
+schema[vol.Required(CONF_DPT)] = vol.In(
+{item.value for item in self.dpt}
+)
else:
schema[vol.Remove(CONF_DPT)] = object
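With this change GASelector either keeps the previous enum-based dptSelect behaviour or, when given a list of DPT classes, exposes those classes to the frontend and accepts any DPT known to get_supported_dpts(). A sketch of the list form as the new sensor schema uses it:

ga_sensor = GASelector(write=False, state_required=True, dpt=["numeric", "string"])
# The serialized options now include {"dptClasses": ["numeric", "string"]}, and the stored
# group address config must include a DPT that passes vol.In(get_supported_dpts()).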

View File

@@ -558,6 +558,35 @@
}
}
},
"sensor": {
"description": "Read-only entity for numeric or string datapoints. Temperature, percent etc.",
"knx": {
"always_callback": {
"description": "Write each update to the state machine, even if the data is the same.",
"label": "Force update"
},
"device_class": {
"description": "Override the DPTs default device class.",
"label": "Device class"
},
"ga_sensor": {
"description": "Group address representing state.",
"label": "State"
},
"section_advanced_options": {
"description": "Override default DPT-based sensor attributes.",
"title": "Overrides"
},
"state_class": {
"description": "Override the DPTs default state class.",
"label": "[%key:component::sensor::entity_component::_::state_attributes::state_class::name%]"
},
"unit_of_measurement": {
"description": "Override the DPTs default unit of measurement.",
"label": "Unit of measurement"
}
}
},
"switch": {
"description": "The KNX switch platform is used as an interface to switching actuators.",
"knx": {
@@ -688,6 +717,79 @@
}
}
},
"selector": {
"sensor_device_class": {
"options": {
"absolute_humidity": "[%key:component::sensor::entity_component::absolute_humidity::name%]",
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
"area": "[%key:component::sensor::entity_component::area::name%]",
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
"battery": "[%key:component::sensor::entity_component::battery::name%]",
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
"current": "[%key:component::sensor::entity_component::current::name%]",
"data_rate": "[%key:component::sensor::entity_component::data_rate::name%]",
"data_size": "[%key:component::sensor::entity_component::data_size::name%]",
"date": "[%key:component::sensor::entity_component::date::name%]",
"distance": "[%key:component::sensor::entity_component::distance::name%]",
"duration": "[%key:component::sensor::entity_component::duration::name%]",
"energy": "[%key:component::sensor::entity_component::energy::name%]",
"energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]",
"energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]",
"frequency": "[%key:component::sensor::entity_component::frequency::name%]",
"gas": "[%key:component::sensor::entity_component::gas::name%]",
"humidity": "[%key:component::sensor::entity_component::humidity::name%]",
"illuminance": "[%key:component::sensor::entity_component::illuminance::name%]",
"irradiance": "[%key:component::sensor::entity_component::irradiance::name%]",
"moisture": "[%key:component::sensor::entity_component::moisture::name%]",
"monetary": "[%key:component::sensor::entity_component::monetary::name%]",
"nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
"nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]",
"nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]",
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",
"power_factor": "[%key:component::sensor::entity_component::power_factor::name%]",
"precipitation": "[%key:component::sensor::entity_component::precipitation::name%]",
"precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]",
"pressure": "[%key:component::sensor::entity_component::pressure::name%]",
"reactive_energy": "[%key:component::sensor::entity_component::reactive_energy::name%]",
"reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]",
"signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]",
"sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]",
"speed": "[%key:component::sensor::entity_component::speed::name%]",
"sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]",
"temperature": "[%key:component::sensor::entity_component::temperature::name%]",
"temperature_delta": "[%key:component::sensor::entity_component::temperature_delta::name%]",
"timestamp": "[%key:component::sensor::entity_component::timestamp::name%]",
"volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]",
"volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds_parts::name%]",
"voltage": "[%key:component::sensor::entity_component::voltage::name%]",
"volume": "[%key:component::sensor::entity_component::volume::name%]",
"volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]",
"volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]",
"water": "[%key:component::sensor::entity_component::water::name%]",
"weight": "[%key:component::sensor::entity_component::weight::name%]",
"wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]",
"wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]"
}
},
"sensor_state_class": {
"options": {
"measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]",
"measurement_angle": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement_angle%]",
"total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]",
"total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]"
}
}
},
"services": {
"event_register": {
"description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this action can be removed.",

View File

@@ -22,6 +22,7 @@ from homeassistant.helpers.typing import UNDEFINED
from homeassistant.util.ulid import ulid_now
from .const import DOMAIN, KNX_MODULE_KEY, SUPPORTED_PLATFORMS_UI
from .dpt import get_supported_dpts
from .storage.config_store import ConfigStoreException
from .storage.const import CONF_DATA
from .storage.entity_store_schema import (
@@ -186,6 +187,7 @@ def ws_get_base_data(
msg["id"],
{
"connection_info": connection_info,
"dpt_metadata": get_supported_dpts(),
"project_info": _project_info,
"supported_platforms": sorted(SUPPORTED_PLATFORMS_UI),
},

View File

@@ -7,11 +7,10 @@ in the Home Assistant Labs UI for users to enable or disable.
from __future__ import annotations
from collections.abc import Callable
import logging
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.storage import Store
@@ -19,6 +18,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
from .helpers import async_is_preview_feature_enabled, async_listen
from .models import (
EventLabsUpdatedData,
LabPreviewFeature,
@@ -135,55 +135,3 @@ async def _async_scan_all_preview_features(
_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
return preview_features
@callback
def async_is_preview_feature_enabled(
hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.
Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False
labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data.preview_feature_status
@callback
def async_listen(
hass: HomeAssistant,
domain: str,
preview_feature: str,
listener: Callable[[], None],
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.
Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
listener: Callback to invoke when the preview feature is toggled
Returns:
Callable to unsubscribe from the listener
"""
@callback
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == domain
and event.data["preview_feature"] == preview_feature
):
listener()
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)

View File

@@ -0,0 +1,63 @@
"""Helper functions for the Home Assistant Labs integration."""
from __future__ import annotations
from collections.abc import Callable
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from .const import LABS_DATA
from .models import EventLabsUpdatedData
@callback
def async_is_preview_feature_enabled(
hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.
Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False
labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data.preview_feature_status
@callback
def async_listen(
hass: HomeAssistant,
domain: str,
preview_feature: str,
listener: Callable[[], None],
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.
Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
listener: Callback to invoke when the preview feature is toggled
Returns:
Callable to unsubscribe from the listener
"""
@callback
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == domain
and event.data["preview_feature"] == preview_feature
):
listener()
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
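A short sketch of how an integration consumes these helpers (hass is an existing HomeAssistant instance; domain and feature name are placeholders):

@callback
def _on_toggle() -> None:
    # Re-check the flag whenever the preview feature is switched in the Labs UI.
    enabled = async_is_preview_feature_enabled(hass, "my_domain", "my_feature")
    ...

unsubscribe = async_listen(hass, "my_domain", "my_feature", _on_toggle)
# later, e.g. on unload:
unsubscribe()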

View File

@@ -12,6 +12,7 @@ from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import HomeAssistant, callback
from .const import LABS_DATA
from .helpers import async_is_preview_feature_enabled, async_listen
from .models import EventLabsUpdatedData
@@ -20,6 +21,7 @@ def async_setup(hass: HomeAssistant) -> None:
"""Set up the number websocket API."""
websocket_api.async_register_command(hass, websocket_list_preview_features)
websocket_api.async_register_command(hass, websocket_update_preview_feature)
websocket_api.async_register_command(hass, websocket_subscribe_feature)
@callback
@@ -108,3 +110,52 @@ async def websocket_update_preview_feature(
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
connection.send_result(msg["id"])
@callback
@websocket_api.websocket_command(
{
vol.Required("type"): "labs/subscribe",
vol.Required("domain"): str,
vol.Required("preview_feature"): str,
}
)
def websocket_subscribe_feature(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to a specific lab preview feature updates."""
domain = msg["domain"]
preview_feature_key = msg["preview_feature"]
labs_data = hass.data[LABS_DATA]
preview_feature_id = f"{domain}.{preview_feature_key}"
if preview_feature_id not in labs_data.preview_features:
connection.send_error(
msg["id"],
websocket_api.ERR_NOT_FOUND,
f"Preview feature {preview_feature_id} not found",
)
return
preview_feature = labs_data.preview_features[preview_feature_id]
@callback
def send_event() -> None:
"""Send feature state to client."""
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
connection.send_message(
websocket_api.event_message(
msg["id"],
preview_feature.to_dict(enabled=enabled),
)
)
connection.subscriptions[msg["id"]] = async_listen(
hass, domain, preview_feature_key, send_event
)
connection.send_result(msg["id"])
send_event()
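For reference, the message flow for the new command looks roughly like this (a sketch; id, domain and preview_feature are placeholders, and the event payload is whatever preview_feature.to_dict(enabled=...) returns):

# client -> server
{"id": 7, "type": "labs/subscribe", "domain": "some_domain", "preview_feature": "some_feature"}
# server -> client: result first, then one event now and one per subsequent toggle
{"id": 7, "type": "result", "success": True}
{"id": 7, "type": "event", "event": {"enabled": True}}  # abbreviated payload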

View File

@@ -108,6 +108,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_MESSAGE,
_async_service_message,
schema=SERVICE_MESSAGE_SCHEMA,
description_placeholders={"icons_url": "https://developer.lametric.com/icons"},
)

View File

@@ -211,7 +211,7 @@
"name": "[%key:common::config_flow::data::device%]"
},
"icon": {
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: https://developer.lametric.com/icons.",
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: {icons_url}.",
"name": "Icon ID"
},
"icon_type": {

View File

@@ -52,7 +52,7 @@ class StateConditionBase(Condition):
self, hass: HomeAssistant, config: ConditionConfig, state: str
) -> None:
"""Initialize condition."""
-self._hass = hass
+super().__init__(hass, config)
if TYPE_CHECKING:
assert config.target
assert config.options

View File

@@ -14,14 +14,9 @@ from homeassistant.config import (
from homeassistant.const import CONF_FILENAME, CONF_MODE, CONF_RESOURCES
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
collection,
config_validation as cv,
issue_registry as ir,
)
from homeassistant.helpers import collection, config_validation as cv
from homeassistant.helpers.frame import report_usage
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.storage import Store
from homeassistant.helpers.translation import async_get_translations
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration
@@ -39,7 +34,6 @@ from .const import ( # noqa: F401
DEFAULT_ICON,
DOMAIN,
EVENT_LOVELACE_UPDATED,
LOVELACE_CONFIG_FILE,
LOVELACE_DATA,
MODE_STORAGE,
MODE_YAML,
@@ -141,19 +135,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
mode = config[DOMAIN][CONF_MODE]
yaml_resources = config[DOMAIN].get(CONF_RESOURCES)
# Deprecated - Remove in 2026.6
# For YAML mode, register the default panel (temporary until user migrates)
if mode == MODE_YAML:
frontend.async_register_built_in_panel(
hass,
DOMAIN,
config={"mode": mode},
sidebar_title="overview",
sidebar_icon="mdi:view-dashboard",
sidebar_default_visible=False,
)
_async_create_yaml_mode_repair(hass)
# End deprecation
frontend.async_register_built_in_panel(
hass,
DOMAIN,
config={"mode": mode},
sidebar_title="overview",
sidebar_icon="mdi:view-dashboard",
sidebar_default_visible=False,
)
async def reload_resources_service_handler(service_call: ServiceCall) -> None:
"""Reload yaml resources."""
@@ -249,13 +238,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if change_type == collection.CHANGE_REMOVED:
frontend.async_remove_panel(hass, url_path)
await hass.data[LOVELACE_DATA].dashboards.pop(url_path).async_delete()
# Re-register default lovelace panel if the "lovelace" dashboard was deleted
if url_path == DOMAIN:
frontend.async_register_built_in_panel(
hass,
DOMAIN,
config={"mode": MODE_STORAGE},
)
return
if change_type == collection.CHANGE_ADDED:
@@ -300,22 +282,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
dashboards_collection.async_add_listener(storage_dashboard_changed)
await dashboards_collection.async_load()
# Migrate default lovelace panel to dashboard entry (storage mode only)
if mode == MODE_STORAGE:
await _async_migrate_default_config(hass, dashboards_collection)
# Register default lovelace panel if no panel exists yet
# (new installation without YAML mode and no migrated dashboard)
if (
frontend.DATA_PANELS not in hass.data
or DOMAIN not in hass.data[frontend.DATA_PANELS]
):
frontend.async_register_built_in_panel(
hass,
DOMAIN,
config={"mode": MODE_STORAGE},
)
dashboard.DashboardsCollectionWebSocket(
dashboards_collection,
"lovelace/dashboards",
@@ -394,94 +360,3 @@ async def _create_map_dashboard(
map_store = hass.data[LOVELACE_DATA].dashboards["map"]
await map_store.async_save({"strategy": {"type": "map"}})
async def _async_migrate_default_config(
hass: HomeAssistant, dashboards_collection: dashboard.DashboardsCollection
) -> None:
"""Migrate default lovelace storage config to a named dashboard entry.
This migration:
1. Checks if .storage/lovelace exists with data
2. Checks if a dashboard with url_path "lovelace" already exists (skip if so)
3. Checks if .storage/lovelace.lovelace already exists (skip if so - incomplete migration)
4. Creates a new dashboard entry with url_path "lovelace"
5. Copies data to .storage/lovelace.lovelace
6. Removes old .storage/lovelace file
"""
# Check if already migrated (dashboard with url_path "lovelace" exists)
for item in dashboards_collection.async_items():
if item.get(CONF_URL_PATH) == DOMAIN:
return
# Check if old storage data exists
old_store = Store[dict[str, Any]](
hass, dashboard.CONFIG_STORAGE_VERSION, dashboard.CONFIG_STORAGE_KEY_DEFAULT
)
old_data = await old_store.async_load()
if old_data is None or old_data.get("config") is None:
return
# Check if new storage data already exists (incomplete previous migration)
new_store = Store[dict[str, Any]](
hass,
dashboard.CONFIG_STORAGE_VERSION,
dashboard.CONFIG_STORAGE_KEY.format(DOMAIN),
)
new_data = await new_store.async_load()
if new_data is not None:
_LOGGER.warning(
"Both old and new lovelace storage files exist, skipping migration"
)
return
_LOGGER.info("Migrating default lovelace config to dashboard entry")
# Get translated title for the dashboard
translations = await async_get_translations(
hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
)
title = translations.get(
"component.onboarding.dashboard.overview.title", "Overview"
)
# Create dashboard entry
try:
await dashboards_collection.async_create_item(
{
CONF_ALLOW_SINGLE_WORD: True,
CONF_ICON: DEFAULT_ICON,
CONF_TITLE: title,
CONF_URL_PATH: DOMAIN,
}
)
except Exception:
_LOGGER.exception("Failed to create dashboard entry during migration")
return
# Save data to new location
await new_store.async_save(old_data)
# Remove old file
await old_store.async_remove()
_LOGGER.info("Successfully migrated default lovelace config to dashboard entry")
# Deprecated - Remove in 2026.6
@callback
def _async_create_yaml_mode_repair(hass: HomeAssistant) -> None:
"""Create repair issue for YAML mode migration."""
ir.async_create_issue(
hass,
DOMAIN,
"yaml_mode_deprecated",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="yaml_mode_deprecated",
translation_placeholders={"config_file": LOVELACE_CONFIG_FILE},
)
# End deprecation

View File

@@ -286,7 +286,7 @@ class DashboardsCollection(collection.DictStorageCollection):
if not allow_single_word and "-" not in url_path:
raise vol.Invalid("Url path needs to contain a hyphen (-)")
if DATA_PANELS in self.hass.data and url_path in self.hass.data[DATA_PANELS]:
if url_path in self.hass.data[DATA_PANELS]:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="url_already_exists",

View File

@@ -4,12 +4,6 @@
"message": "The URL \"{url}\" is already in use. Please choose a different one."
}
},
"issues": {
"yaml_mode_deprecated": {
"description": "Starting with Home Assistant 2026.6, the default Lovelace dashboard will no longer support YAML mode. To migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Rename `{config_file}` to a new filename (e.g., `my-dashboard.yaml`)\n3. Add a dashboard entry in your `configuration.yaml`:\n\n```yaml\nlovelace:\n dashboards:\n lovelace:\n mode: yaml\n filename: my-dashboard.yaml\n title: Overview\n icon: mdi:view-dashboard\n show_in_sidebar: true\n```\n\n4. Restart Home Assistant",
"title": "Lovelace YAML mode migration required"
}
},
"services": {
"reload_resources": {
"description": "Reloads dashboard resources from the YAML-configuration.",

View File

@@ -30,9 +30,7 @@ rules:
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow:
status: exempt
comment: Devices don't require authentication
reauthentication-flow: done
test-coverage: todo
# Gold

View File

@@ -5,7 +5,6 @@
"living_room": "Living Room"
},
"dashboard": {
"map": { "title": "Map" },
"overview": { "title": "Overview" }
"map": { "title": "Map" }
}
}

View File

@@ -129,4 +129,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)

View File

@@ -156,7 +156,7 @@
},
"filename": {
"description": "Path to the file to upload.",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Filename"
}
},

View File

@@ -64,10 +64,11 @@ class PingDataICMPLib(PingData):
return
_LOGGER.debug(
"async_ping returned: reachable=%s sent=%i received=%s",
"async_ping returned: reachable=%s sent=%i received=%s loss=%s",
data.is_alive,
data.packets_sent,
data.packets_received,
data.packet_loss * 100,
)
self.is_alive = data.is_alive
@@ -80,6 +81,7 @@ class PingDataICMPLib(PingData):
"max": data.max_rtt,
"avg": data.avg_rtt,
"jitter": data.jitter,
"loss": data.packet_loss * 100,
}
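For reference, the new loss value is just the packet-loss fraction scaled to a percentage; a worked example, assuming icmplib reports packet_loss as a 0.0-1.0 fraction (which is how data.packet_loss is consumed above):

```python
# Worked example under the assumption stated above.
packets_sent = 5
packets_received = 4
packet_loss = 1 - packets_received / packets_sent  # 0.2
loss_percent = packet_loss * 100                   # 20.0 -> value of the "loss" sensor
print(f"sent={packets_sent} received={packets_received} loss={loss_percent:.1f}%")
```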

View File

@@ -0,0 +1,9 @@
{
"entity": {
"sensor": {
"loss": {
"default": "mdi:alert-circle-outline"
}
}
}
}

View File

@@ -10,7 +10,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory, UnitOfTime
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -82,6 +82,16 @@ SENSORS: tuple[PingSensorEntityDescription, ...] = (
value_fn=lambda result: result.data.get("jitter"),
has_fn=lambda result: "jitter" in result.data,
),
PingSensorEntityDescription(
key="loss",
translation_key="loss",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda result: result.data.get("loss"),
has_fn=lambda result: "loss" in result.data,
),
)

View File

@@ -22,6 +22,9 @@
"jitter": {
"name": "Jitter"
},
"loss": {
"name": "Packet loss"
},
"round_trip_time_avg": {
"name": "Round-trip time average"
},

View File

@@ -1,20 +1,23 @@
"""The Rituals Perfume Genie integration."""
import asyncio
import logging
import aiohttp
from pyrituals import Account, Diffuser
from aiohttp import ClientError, ClientResponseError
from pyrituals import Account, AuthenticationException, Diffuser
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import ACCOUNT_HASH, DOMAIN, UPDATE_INTERVAL
from .coordinator import RitualsDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.NUMBER,
@@ -26,12 +29,38 @@ PLATFORMS = [
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Rituals Perfume Genie from a config entry."""
# Initiate reauth for old config entries which don't have username / password in the entry data
if CONF_EMAIL not in entry.data or CONF_PASSWORD not in entry.data:
raise ConfigEntryAuthFailed("Missing credentials")
session = async_get_clientsession(hass)
account = Account(session=session, account_hash=entry.data[ACCOUNT_HASH])
account = Account(
email=entry.data[CONF_EMAIL],
password=entry.data[CONF_PASSWORD],
session=session,
)
try:
# Authenticate first so API token/cookies are available for subsequent calls
await account.authenticate()
account_devices = await account.get_devices()
except aiohttp.ClientError as err:
except AuthenticationException as err:
# Credentials invalid/expired -> raise AuthFailed to trigger reauth flow
raise ConfigEntryAuthFailed(err) from err
except ClientResponseError as err:
_LOGGER.debug(
"HTTP error during Rituals setup: status=%s, url=%s, headers=%s",
err.status,
err.request_info,
dict(err.headers or {}),
)
raise ConfigEntryNotReady from err
except ClientError as err:
raise ConfigEntryNotReady from err
# Migrate old unique_ids to the new format
@@ -45,7 +74,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Create a coordinator for each diffuser
coordinators = {
diffuser.hublot: RitualsDataUpdateCoordinator(
hass, entry, diffuser, update_interval
hass, entry, account, diffuser, update_interval
)
for diffuser in account_devices
}
@@ -106,3 +135,14 @@ def async_migrate_entities_unique_ids(
registry_entry.entity_id,
new_unique_id=f"{diffuser.hublot}-{new_unique_id}",
)
# Migration helpers for API v2
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate config entry to version 2: drop legacy ACCOUNT_HASH and bump version."""
if entry.version < 2:
data = dict(entry.data)
data.pop(ACCOUNT_HASH, None)
hass.config_entries.async_update_entry(entry, data=data, version=2)
return True
return True
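An illustration of what the migration above does to a legacy v1 entry; Home Assistant invokes async_migrate_entry for entries stored with a version below the config flow's VERSION, and the reauth flow later supplies the credentials:

```python
# Illustration only: effect of async_migrate_entry on a v1 entry's data.
old_v1_data = {"account_hash": "abc123"}  # ACCOUNT_HASH from API v1

migrated = dict(old_v1_data)
migrated.pop("account_hash", None)
assert migrated == {}  # stored again with version=2; reauth adds email/password
```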

View File

@@ -2,10 +2,10 @@
from __future__ import annotations
import logging
from typing import Any
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any
from aiohttp import ClientResponseError
from aiohttp import ClientError
from pyrituals import Account, AuthenticationException
import voluptuous as vol
@@ -13,9 +13,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import ACCOUNT_HASH, DOMAIN
_LOGGER = logging.getLogger(__name__)
from .const import DOMAIN
DATA_SCHEMA = vol.Schema(
{
@@ -28,39 +26,88 @@ DATA_SCHEMA = vol.Schema(
class RitualsPerfumeGenieConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Rituals Perfume Genie."""
VERSION = 1
VERSION = 2
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
errors = {}
session = async_get_clientsession(self.hass)
account = Account(user_input[CONF_EMAIL], user_input[CONF_PASSWORD], session)
try:
await account.authenticate()
except ClientResponseError:
_LOGGER.exception("Unexpected response")
errors["base"] = "cannot_connect"
except AuthenticationException:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(account.email)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=account.email,
data={ACCOUNT_HASH: account.account_hash},
errors: dict[str, str] = {}
if user_input is not None:
session = async_get_clientsession(self.hass)
account = Account(
email=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await account.authenticate()
except AuthenticationException:
errors["base"] = "invalid_auth"
except ClientError:
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(user_input[CONF_EMAIL])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_EMAIL],
data=user_input,
)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle re-authentication with Rituals."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Form to log in again."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if TYPE_CHECKING:
assert reauth_entry.unique_id is not None
if user_input:
session = async_get_clientsession(self.hass)
account = Account(
email=reauth_entry.unique_id,
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await account.authenticate()
except AuthenticationException:
errors["base"] = "invalid_auth"
except ClientError:
errors["base"] = "cannot_connect"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data={
CONF_EMAIL: reauth_entry.unique_id,
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
),
reauth_entry.data,
),
errors=errors,
)

View File

@@ -4,6 +4,7 @@ from datetime import timedelta
DOMAIN = "rituals_perfume_genie"
# Old (API V1)
ACCOUNT_HASH = "account_hash"
# The API provided by Rituals is currently rate limited to 30 requests

View File

@@ -3,11 +3,13 @@
from datetime import timedelta
import logging
from pyrituals import Diffuser
from aiohttp import ClientError, ClientResponseError
from pyrituals import Account, AuthenticationException, Diffuser
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
@@ -23,10 +25,12 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
account: Account,
diffuser: Diffuser,
update_interval: timedelta,
) -> None:
"""Initialize global Rituals Perfume Genie data updater."""
self.account = account
self.diffuser = diffuser
super().__init__(
hass,
@@ -37,5 +41,36 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
)
async def _async_update_data(self) -> None:
"""Fetch data from Rituals."""
await self.diffuser.update_data()
"""Fetch data from Rituals, with one silent re-auth on 401.
If silent re-auth also fails, raise ConfigEntryAuthFailed to trigger reauth flow.
Other HTTP/network errors are wrapped in UpdateFailed so HA can retry.
"""
try:
await self.diffuser.update_data()
except (AuthenticationException, ClientResponseError) as err:
# Treat 401/403 like AuthenticationException → one silent re-auth, single retry
if isinstance(err, ClientResponseError) and (status := err.status) not in (
401,
403,
):
# Non-auth HTTP error → let HA retry
raise UpdateFailed(f"HTTP {status}") from err
self.logger.debug(
"Auth issue detected (%r). Attempting silent re-auth.", err
)
try:
await self.account.authenticate()
await self.diffuser.update_data()
except AuthenticationException as err2:
# Credentials invalid → trigger HA reauth
raise ConfigEntryAuthFailed from err2
except ClientResponseError as err2:
# Still HTTP auth errors after refresh → trigger HA reauth
if err2.status in (401, 403):
raise ConfigEntryAuthFailed from err2
raise UpdateFailed(f"HTTP {err2.status}") from err2
except ClientError as err:
# Network issues (timeouts, DNS, etc.)
raise UpdateFailed(f"Network error: {err!r}") from err

View File

@@ -1,10 +1,10 @@
{
"domain": "rituals_perfume_genie",
"name": "Rituals Perfume Genie",
"codeowners": ["@milanmeu", "@frenck"],
"codeowners": ["@milanmeu", "@frenck", "@quebulm"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
"iot_class": "cloud_polling",
"loggers": ["pyrituals"],
"requirements": ["pyrituals==0.0.6"]
"requirements": ["pyrituals==0.0.7"]
}

View File

@@ -1,7 +1,8 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "Re-authentication was successful"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -9,6 +10,12 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"description": "Please enter the correct password."
},
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",

View File

@@ -65,11 +65,9 @@ rules:
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: todo
comment: The Cloud vs Local API warning should probably be a repair issue.
repair-issues: done
stale-devices: done
# Platinum
async-dependency: todo
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -44,6 +44,7 @@ from .entity import (
)
from .utils import (
async_remove_orphaned_entities,
async_remove_shelly_entity,
format_ble_addr,
get_blu_trv_device_info,
get_device_entry_gen,
@@ -80,6 +81,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
device_class=ButtonDeviceClass.RESTART,
entity_category=EntityCategory.CONFIG,
press_action="trigger_reboot",
supported=lambda coordinator: coordinator.sleep_period == 0,
),
ShellyButtonDescription[ShellyBlockCoordinator](
key="self_test",
@@ -197,7 +199,8 @@ async def async_setup_entry(
"""Set up button entities."""
entry_data = config_entry.runtime_data
coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator | None
if get_device_entry_gen(config_entry) in RPC_GENERATIONS:
device_gen = get_device_entry_gen(config_entry)
if device_gen in RPC_GENERATIONS:
coordinator = entry_data.rpc
else:
coordinator = entry_data.block
@@ -210,6 +213,12 @@ async def async_setup_entry(
hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
)
# Remove the 'restart' button for sleeping devices as it was mistakenly
# added in https://github.com/home-assistant/core/pull/154673
entry_sleep_period = config_entry.data[CONF_SLEEP_PERIOD]
if device_gen in RPC_GENERATIONS and entry_sleep_period:
async_remove_shelly_entity(hass, BUTTON_PLATFORM, f"{coordinator.mac}-reboot")
entities: list[ShellyButton] = []
entities.extend(
@@ -224,7 +233,7 @@ async def async_setup_entry(
return
# add RPC buttons
if config_entry.data[CONF_SLEEP_PERIOD]:
if entry_sleep_period:
async_setup_entry_rpc(
hass,
config_entry,

View File

@@ -150,6 +150,7 @@ class SunCondition(Condition):
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
super().__init__(hass, config)
assert config.options is not None
self._options = config.options

View File

@@ -64,6 +64,12 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
DESCRIPTION_PLACEHOLDERS: dict[str, str] = {
"botfather_username": "@BotFather",
"botfather_url": "https://t.me/botfather",
"socks_url": "socks5://username:password@proxy_ip:proxy_port",
}
STEP_USER_DATA_SCHEMA: vol.Schema = vol.Schema(
{
vol.Required(CONF_PLATFORM): SelectSelector(
@@ -310,10 +316,7 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle a flow to create a new config entry for a Telegram bot."""
description_placeholders: dict[str, str] = {
"botfather_username": "@BotFather",
"botfather_url": "https://t.me/botfather",
}
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()
if not user_input:
return self.async_show_form(
step_id="user",
@@ -552,13 +555,14 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
},
},
),
description_placeholders=DESCRIPTION_PLACEHOLDERS,
)
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
CONF_PROXY_URL
)
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()
user_input[CONF_API_KEY] = api_key
bot_name = await self._validate_bot(

View File

@@ -60,7 +60,7 @@
"proxy_url": "Proxy URL"
},
"data_description": {
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n(socks5://username:password@proxy_ip:proxy_port)"
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n({socks_url})"
},
"name": "Advanced settings"
}

View File

@@ -635,14 +635,14 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
# Support legacy mireds in template light.
temperature = int(render)
if (min_kelvin := self._attr_min_color_temp_kelvin) is not None:
min_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
else:
min_mireds = DEFAULT_MIN_MIREDS
if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
max_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
max_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
else:
max_mireds = DEFAULT_MAX_MIREDS
if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
min_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
else:
min_mireds = DEFAULT_MIN_MIREDS
if min_mireds <= temperature <= max_mireds:
self._attr_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(temperature)
@@ -856,42 +856,36 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
try:
if render in (None, "None", ""):
self._attr_max_mireds = DEFAULT_MAX_MIREDS
self._attr_max_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = None
return
self._attr_max_mireds = max_mireds = int(render)
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(max_mireds)
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(int(render))
)
except ValueError:
_LOGGER.exception(
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_max_mireds = DEFAULT_MAX_MIREDS
self._attr_max_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = None
@callback
def _update_min_mireds(self, render):
"""Update the min mireds from the template."""
try:
if render in (None, "None", ""):
self._attr_min_mireds = DEFAULT_MIN_MIREDS
self._attr_min_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = None
return
self._attr_min_mireds = min_mireds = int(render)
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(min_mireds)
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(int(render))
)
except ValueError:
_LOGGER.exception(
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_min_mireds = DEFAULT_MIN_MIREDS
self._attr_min_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = None
@callback
def _update_supports_transition(self, render):
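The swapped assignments above follow from mireds and kelvin being reciprocal scales; a small worked example:

```python
# mired = 1_000_000 / kelvin, so the minimum kelvin bound corresponds to the
# maximum mired bound and vice versa.
def kelvin_to_mired(kelvin: float) -> float:
    return 1_000_000 / kelvin

min_kelvin, max_kelvin = 2000, 6500        # warm .. cool
max_mireds = kelvin_to_mired(min_kelvin)   # 500.0
min_mireds = kelvin_to_mired(max_kelvin)   # ~153.8
print(min_mireds, max_mireds)
```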

View File

@@ -19,6 +19,7 @@ from homeassistant.components.light import (
LightEntityDescription,
LightEntityFeature,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.color import rgb_hex_to_rgb_list
@@ -117,6 +118,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiLightEntityDescription, ...] = (
UnifiLightEntityDescription[Devices, Device](
key="LED control",
translation_key="led_control",
entity_category=EntityCategory.CONFIG,
allowed_fn=lambda hub, obj_id: True,
api_handler_fn=lambda api: api.devices,
available_fn=async_device_available_fn,

View File

@@ -40,7 +40,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"requirements": ["uiprotect==7.31.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==7.33.2", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -1914,11 +1914,11 @@
"title": "Zigbee network settings have changed"
},
"wrong_silabs_firmware_installed_nabucasa": {
"description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install the Zigbee firmware:\n - Go to Settings > System > Hardware, select the device and select Configure.\n - Select the 'Migrate Zigbee to a new adapter' option and follow the instructions.",
"title": "Zigbee adapter with multiprotocol firmware detected"
"description": "Your Zigbee adapter is currently in an incorrect state: {firmware_type}.\n\nThe device may have Thread or multiprotocol firmware installed, or it may be stuck in the bootloader. To resolve this, try to unplug the adapter temporarily.\n\nIf the issue persists and you need to install Zigbee firmware:\n - Go to Settings > System > Hardware, select the device and select Configure.\n - Select the 'Migrate Zigbee to a new adapter' option and follow the instructions.",
"title": "Zigbee adapter in incorrect state"
},
"wrong_silabs_firmware_installed_other": {
"description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install Zigbee firmware. Follow your Zigbee adapter manufacturer's instructions for how to do this.",
"description": "Your Zigbee adapter is currently in an incorrect state: {firmware_type}.\n\nThe device may have Thread or multiprotocol firmware installed, or it may be stuck in the bootloader. To resolve this, try to unplug the adapter temporarily.\n\nIf the issue persists and you need to install Zigbee firmware, follow your Zigbee adapter manufacturer's instructions for how to do this.",
"title": "[%key:component::zha::issues::wrong_silabs_firmware_installed_nabucasa::title%]"
}
},

View File

@@ -114,6 +114,7 @@ class ZoneCondition(Condition):
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
super().__init__(hass, config)
assert config.options is not None
self._options = config.options

View File

@@ -259,6 +259,8 @@ _CONDITION_SCHEMA = _CONDITION_BASE_SCHEMA.extend(
class Condition(abc.ABC):
"""Condition class."""
_hass: HomeAssistant
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
@@ -293,6 +295,7 @@ class Condition(abc.ABC):
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
@abc.abstractmethod
async def async_get_checker(self) -> ConditionCheckerType:
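With _hass assigned in Condition.__init__, subclasses only forward their arguments; a hypothetical subclass sketch mirroring the SunCondition/ZoneCondition hunks above:

```python
# Hypothetical subclass, not part of the change set: _hass is set by
# Condition.__init__, so no manual self._hass assignment is needed.
class ExampleCondition(Condition):
    """Example condition."""

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize condition."""
        super().__init__(hass, config)  # sets self._hass
        assert config.options is not None
        self._options = config.options

    async def async_get_checker(self) -> ConditionCheckerType:
        """Return a checker; body omitted in this sketch."""
        raise NotImplementedError
```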

View File

@@ -124,6 +124,12 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = {
# Added in 2025.10.0 because of
# https://github.com/frenck/spook/issues/1066
"spook": BlockedIntegration(AwesomeVersion("4.0.0"), "breaks the template engine"),
# Added in 2025.12.1 because of
# https://github.com/JaccoR/hass-entso-e/issues/263
"entsoe": BlockedIntegration(
AwesomeVersion("0.7.1"),
"crashes Home Assistant when it can't connect to the API",
),
}
DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey(

View File

@@ -2,7 +2,7 @@
aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
aiodns==3.5.0
aiodns==3.6.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0
@@ -39,7 +39,7 @@ habluetooth==5.8.0
hass-nabucasa==1.7.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251203.0
home-assistant-frontend==20251203.1
home-assistant-intents==2025.12.2
httpx==0.28.1
ifaddr==0.2.0

View File

@@ -24,7 +24,7 @@ classifiers = [
]
requires-python = ">=3.13.2"
dependencies = [
"aiodns==3.5.0",
"aiodns==3.6.0",
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11

requirements.txt generated
View File

@@ -3,7 +3,7 @@
-c homeassistant/package_constraints.txt
# Home Assistant Core
aiodns==3.5.0
aiodns==3.6.0
aiohasupervisor==0.3.3
aiohttp==3.13.2
aiohttp_cors==0.8.1

requirements_all.txt generated
View File

@@ -231,7 +231,7 @@ aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
# homeassistant.components.dnsip
aiodns==3.5.0
aiodns==3.6.0
# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@@ -1204,7 +1204,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20251203.0
home-assistant-frontend==20251203.1
# homeassistant.components.conversation
home-assistant-intents==2025.12.2
@@ -2351,7 +2351,7 @@ pyrepetierng==0.1.0
pyrisco==0.6.7
# homeassistant.components.rituals_perfume_genie
pyrituals==0.0.6
pyrituals==0.0.7
# homeassistant.components.thread
pyroute2==0.7.5
@@ -3059,7 +3059,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==7.31.0
uiprotect==7.33.2
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -222,7 +222,7 @@ aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
# homeassistant.components.dnsip
aiodns==3.5.0
aiodns==3.6.0
# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@@ -1062,7 +1062,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20251203.0
home-assistant-frontend==20251203.1
# homeassistant.components.conversation
home-assistant-intents==2025.12.2
@@ -1980,7 +1980,7 @@ pyrate-limiter==3.9.0
pyrisco==0.6.7
# homeassistant.components.rituals_perfume_genie
pyrituals==0.0.6
pyrituals==0.0.7
# homeassistant.components.thread
pyroute2==0.7.5
@@ -2547,7 +2547,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==7.31.0
uiprotect==7.33.2
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -12,7 +12,13 @@ from pyairobotrest.models import (
import pytest
from homeassistant.components.airobot.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import (
CONF_HOST,
CONF_MAC,
CONF_PASSWORD,
CONF_USERNAME,
Platform,
)
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
@@ -105,16 +111,24 @@ def mock_config_entry() -> MockConfigEntry:
)
@pytest.fixture
def platforms() -> list[Platform]:
"""Fixture to specify platforms to test."""
return [Platform.CLIMATE, Platform.SENSOR]
@pytest.fixture
async def init_integration(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_airobot_client: AsyncMock,
platforms: list[Platform],
) -> MockConfigEntry:
"""Set up the Airobot integration for testing."""
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
with patch("homeassistant.components.airobot.PLATFORMS", platforms):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
return mock_config_entry

View File

@@ -0,0 +1,220 @@
# serializer version: 1
# name: test_sensors[sensor.test_thermostat_air_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.test_thermostat_air_temperature',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 1,
}),
}),
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
'original_icon': None,
'original_name': 'Air temperature',
'platform': 'airobot',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'air_temperature',
'unique_id': 'T01A1B2C3_air_temperature',
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
})
# ---
# name: test_sensors[sensor.test_thermostat_air_temperature-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'temperature',
'friendly_name': 'Test Thermostat Air temperature',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
}),
'context': <ANY>,
'entity_id': 'sensor.test_thermostat_air_temperature',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '22.0',
})
# ---
# name: test_sensors[sensor.test_thermostat_error_count-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_thermostat_error_count',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Error count',
'platform': 'airobot',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'errors',
'unique_id': 'T01A1B2C3_errors',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[sensor.test_thermostat_error_count-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test Thermostat Error count',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.test_thermostat_error_count',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.test_thermostat_heating_uptime-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_thermostat_heating_uptime',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 2,
}),
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
}),
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
'original_icon': None,
'original_name': 'Heating uptime',
'platform': 'airobot',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'heating_uptime',
'unique_id': 'T01A1B2C3_heating_uptime',
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
})
# ---
# name: test_sensors[sensor.test_thermostat_heating_uptime-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'duration',
'friendly_name': 'Test Thermostat Heating uptime',
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
'unit_of_measurement': <UnitOfTime.HOURS: 'h'>,
}),
'context': <ANY>,
'entity_id': 'sensor.test_thermostat_heating_uptime',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1.38888888888889',
})
# ---
# name: test_sensors[sensor.test_thermostat_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.test_thermostat_humidity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.HUMIDITY: 'humidity'>,
'original_icon': None,
'original_name': 'Humidity',
'platform': 'airobot',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': 'T01A1B2C3_humidity',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[sensor.test_thermostat_humidity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'humidity',
'friendly_name': 'Test Thermostat Humidity',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.test_thermostat_humidity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '45.0',
})
# ---

View File

@@ -17,7 +17,7 @@ from homeassistant.components.climate import (
SERVICE_SET_PRESET_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
import homeassistant.helpers.entity_registry as er
@@ -25,12 +25,19 @@ import homeassistant.helpers.entity_registry as er
from tests.common import MockConfigEntry, snapshot_platform
@pytest.fixture
def platforms() -> list[Platform]:
"""Fixture to specify platforms to test."""
return [Platform.CLIMATE]
@pytest.mark.usefixtures("init_integration")
async def test_climate_entities(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
entity_registry: er.EntityRegistry,
mock_config_entry: MockConfigEntry,
platforms: list[Platform],
) -> None:
"""Test climate entities."""
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

View File

@@ -0,0 +1,38 @@
"""Tests for the Airobot sensor platform."""
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry, snapshot_platform
@pytest.fixture
def platforms() -> list[Platform]:
"""Fixture to specify platforms to test."""
return [Platform.SENSOR]
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensors(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
entity_registry: er.EntityRegistry,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the sensor entities."""
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensor_availability_without_optional_sensors(
hass: HomeAssistant,
) -> None:
"""Test sensors are not created when optional hardware is not present."""
# Default mock has no floor sensor, CO2, or AQI - they should not be created
assert hass.states.get("sensor.test_thermostat_floor_temperature") is None
assert hass.states.get("sensor.test_thermostat_carbon_dioxide") is None
assert hass.states.get("sensor.test_thermostat_air_quality_index") is None

View File

@@ -37,6 +37,7 @@ from .const import (
TEST_DATA_CREATE_ENTRY,
TEST_DATA_CREATE_ENTRY_2,
TEST_DATA_CREATE_ENTRY_3,
TEST_DATA_CREATE_ENTRY_4,
TEST_FRIENDLY_NAME,
TEST_FRIENDLY_NAME_3,
TEST_FRIENDLY_NAME_4,
@@ -48,10 +49,12 @@ from .const import (
TEST_NAME,
TEST_NAME_2,
TEST_NAME_3,
TEST_NAME_4,
TEST_REMOTE_SERIAL,
TEST_SERIAL_NUMBER,
TEST_SERIAL_NUMBER_2,
TEST_SERIAL_NUMBER_3,
TEST_SERIAL_NUMBER_4,
TEST_SOUND_MODE,
TEST_SOUND_MODE_2,
TEST_SOUND_MODE_NAME,
@@ -93,6 +96,17 @@ def mock_config_entry_premiere() -> MockConfigEntry:
)
@pytest.fixture
def mock_config_entry_a5() -> MockConfigEntry:
"""Mock config entry for Beosound A5."""
return MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_SERIAL_NUMBER_4,
data=TEST_DATA_CREATE_ENTRY_4,
title=TEST_NAME_4,
)
async def mock_websocket_connection(
hass: HomeAssistant, mock_mozart_client: AsyncMock
) -> None:

View File

@@ -42,6 +42,7 @@ TEST_MODEL_CORE = "Beoconnect Core"
TEST_MODEL_PREMIERE = "Beosound Premiere"
TEST_MODEL_THEATRE = "Beosound Theatre"
TEST_MODEL_LEVEL = "Beosound Level"
TEST_MODEL_A5 = "Beosound A5"
TEST_SERIAL_NUMBER = "11111111"
TEST_NAME = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER}"
TEST_FRIENDLY_NAME = "Living room Balance"
@@ -64,9 +65,11 @@ TEST_JID_3 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER_3}@prod
TEST_MEDIA_PLAYER_ENTITY_ID_3 = f"media_player.beosound_premiere_{TEST_SERIAL_NUMBER_3}"
TEST_HOST_3 = "192.168.0.3"
TEST_FRIENDLY_NAME_4 = "Lounge room Balance"
TEST_JID_4 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.44444444@products.bang-olufsen.com"
TEST_MEDIA_PLAYER_ENTITY_ID_4 = "media_player.beosound_balance_44444444"
TEST_FRIENDLY_NAME_4 = "Lounge room A5"
TEST_SERIAL_NUMBER_4 = "44444444"
TEST_NAME_4 = f"{TEST_MODEL_A5}-{TEST_SERIAL_NUMBER_4}"
TEST_JID_4 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER_4}@products.bang-olufsen.com"
TEST_MEDIA_PLAYER_ENTITY_ID_4 = f"media_player.beosound_a5_{TEST_SERIAL_NUMBER_4}"
TEST_HOST_4 = "192.168.0.4"
# Beoremote One
@@ -105,6 +108,13 @@ TEST_DATA_CREATE_ENTRY_3 = {
CONF_NAME: TEST_NAME_3,
}
TEST_DATA_CREATE_ENTRY_4 = {
CONF_HOST: TEST_HOST_4,
CONF_MODEL: TEST_MODEL_A5,
CONF_BEOLINK_JID: TEST_JID_4,
CONF_NAME: TEST_NAME_4,
}
TEST_DATA_ZEROCONF = ZeroconfServiceInfo(
ip_address=IPv4Address(TEST_HOST),
ip_addresses=[IPv4Address(TEST_HOST)],

View File

@@ -45,11 +45,11 @@
'beolink': dict({
'listeners': dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',

View File

@@ -1,4 +1,108 @@
# serializer version: 1
# name: test_button_event_creation_a5
list([
'event.beosound_a5_44444444_bluetooth',
'event.beosound_a5_44444444_next',
'event.beosound_a5_44444444_play_pause',
'event.beosound_a5_44444444_favorite_1',
'event.beosound_a5_44444444_favorite_2',
'event.beosound_a5_44444444_favorite_3',
'event.beosound_a5_44444444_favorite_4',
'event.beosound_a5_44444444_previous',
'event.beosound_a5_44444444_volume',
'event.beoremote_one_55555555_44444444_light_blue',
'event.beoremote_one_55555555_44444444_light_digit_0',
'event.beoremote_one_55555555_44444444_light_digit_1',
'event.beoremote_one_55555555_44444444_light_digit_2',
'event.beoremote_one_55555555_44444444_light_digit_3',
'event.beoremote_one_55555555_44444444_light_digit_4',
'event.beoremote_one_55555555_44444444_light_digit_5',
'event.beoremote_one_55555555_44444444_light_digit_6',
'event.beoremote_one_55555555_44444444_light_digit_7',
'event.beoremote_one_55555555_44444444_light_digit_8',
'event.beoremote_one_55555555_44444444_light_digit_9',
'event.beoremote_one_55555555_44444444_light_down',
'event.beoremote_one_55555555_44444444_light_green',
'event.beoremote_one_55555555_44444444_light_left',
'event.beoremote_one_55555555_44444444_light_play',
'event.beoremote_one_55555555_44444444_light_red',
'event.beoremote_one_55555555_44444444_light_rewind',
'event.beoremote_one_55555555_44444444_light_right',
'event.beoremote_one_55555555_44444444_light_select',
'event.beoremote_one_55555555_44444444_light_stop',
'event.beoremote_one_55555555_44444444_light_up',
'event.beoremote_one_55555555_44444444_light_wind',
'event.beoremote_one_55555555_44444444_light_yellow',
'event.beoremote_one_55555555_44444444_light_function_1',
'event.beoremote_one_55555555_44444444_light_function_2',
'event.beoremote_one_55555555_44444444_light_function_3',
'event.beoremote_one_55555555_44444444_light_function_4',
'event.beoremote_one_55555555_44444444_light_function_5',
'event.beoremote_one_55555555_44444444_light_function_6',
'event.beoremote_one_55555555_44444444_light_function_7',
'event.beoremote_one_55555555_44444444_light_function_8',
'event.beoremote_one_55555555_44444444_light_function_9',
'event.beoremote_one_55555555_44444444_light_function_10',
'event.beoremote_one_55555555_44444444_light_function_11',
'event.beoremote_one_55555555_44444444_light_function_12',
'event.beoremote_one_55555555_44444444_light_function_13',
'event.beoremote_one_55555555_44444444_light_function_14',
'event.beoremote_one_55555555_44444444_light_function_15',
'event.beoremote_one_55555555_44444444_light_function_16',
'event.beoremote_one_55555555_44444444_light_function_17',
'event.beoremote_one_55555555_44444444_control_blue',
'event.beoremote_one_55555555_44444444_control_digit_0',
'event.beoremote_one_55555555_44444444_control_digit_1',
'event.beoremote_one_55555555_44444444_control_digit_2',
'event.beoremote_one_55555555_44444444_control_digit_3',
'event.beoremote_one_55555555_44444444_control_digit_4',
'event.beoremote_one_55555555_44444444_control_digit_5',
'event.beoremote_one_55555555_44444444_control_digit_6',
'event.beoremote_one_55555555_44444444_control_digit_7',
'event.beoremote_one_55555555_44444444_control_digit_8',
'event.beoremote_one_55555555_44444444_control_digit_9',
'event.beoremote_one_55555555_44444444_control_down',
'event.beoremote_one_55555555_44444444_control_green',
'event.beoremote_one_55555555_44444444_control_left',
'event.beoremote_one_55555555_44444444_control_play',
'event.beoremote_one_55555555_44444444_control_red',
'event.beoremote_one_55555555_44444444_control_rewind',
'event.beoremote_one_55555555_44444444_control_right',
'event.beoremote_one_55555555_44444444_control_select',
'event.beoremote_one_55555555_44444444_control_stop',
'event.beoremote_one_55555555_44444444_control_up',
'event.beoremote_one_55555555_44444444_control_wind',
'event.beoremote_one_55555555_44444444_control_yellow',
'event.beoremote_one_55555555_44444444_control_function_1',
'event.beoremote_one_55555555_44444444_control_function_2',
'event.beoremote_one_55555555_44444444_control_function_3',
'event.beoremote_one_55555555_44444444_control_function_4',
'event.beoremote_one_55555555_44444444_control_function_5',
'event.beoremote_one_55555555_44444444_control_function_6',
'event.beoremote_one_55555555_44444444_control_function_7',
'event.beoremote_one_55555555_44444444_control_function_8',
'event.beoremote_one_55555555_44444444_control_function_9',
'event.beoremote_one_55555555_44444444_control_function_10',
'event.beoremote_one_55555555_44444444_control_function_11',
'event.beoremote_one_55555555_44444444_control_function_12',
'event.beoremote_one_55555555_44444444_control_function_13',
'event.beoremote_one_55555555_44444444_control_function_14',
'event.beoremote_one_55555555_44444444_control_function_15',
'event.beoremote_one_55555555_44444444_control_function_16',
'event.beoremote_one_55555555_44444444_control_function_17',
'event.beoremote_one_55555555_44444444_control_function_18',
'event.beoremote_one_55555555_44444444_control_function_19',
'event.beoremote_one_55555555_44444444_control_function_20',
'event.beoremote_one_55555555_44444444_control_function_21',
'event.beoremote_one_55555555_44444444_control_function_22',
'event.beoremote_one_55555555_44444444_control_function_23',
'event.beoremote_one_55555555_44444444_control_function_24',
'event.beoremote_one_55555555_44444444_control_function_25',
'event.beoremote_one_55555555_44444444_control_function_26',
'event.beoremote_one_55555555_44444444_control_function_27',
'media_player.beosound_a5_44444444',
])
# ---
# name: test_button_event_creation_balance
list([
'event.beosound_balance_11111111_bluetooth',
@@ -104,9 +208,8 @@
'media_player.beosound_balance_11111111',
])
# ---
# name: test_button_event_creation_beosound_premiere
# name: test_button_event_creation_premiere
list([
'event.beosound_premiere_33333333_microphone',
'event.beosound_premiere_33333333_next',
'event.beosound_premiere_33333333_play_pause',
'event.beosound_premiere_33333333_favorite_1',
@@ -208,7 +311,7 @@
'media_player.beosound_premiere_33333333',
])
# ---
# name: test_no_button_and_remote_key_event_creation
# name: test_no_button_and_remote_key_event_creation_core
list([
'media_player.beoconnect_core_22222222',
])

View File

@@ -5,11 +5,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -53,11 +53,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -102,11 +102,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -151,11 +151,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -200,11 +200,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -249,11 +249,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -297,11 +297,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -345,11 +345,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -393,11 +393,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -441,11 +441,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -489,11 +489,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -537,11 +537,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -585,11 +585,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -634,11 +634,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
@@ -683,11 +683,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -732,11 +732,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
@@ -781,11 +781,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -831,11 +831,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
@@ -880,11 +880,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -929,11 +929,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
@@ -978,11 +978,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -1029,7 +1029,7 @@
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
@@ -1072,11 +1072,11 @@
<BeoAttribute.BEOLINK: 'beolink'>: dict({
<BeoAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
'Lounge room A5': '1111.1111111.44444444@products.bang-olufsen.com',
}),
<BeoAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',


@@ -3,6 +3,7 @@
from unittest.mock import AsyncMock
from mozart_api.models import BeoRemoteButton, ButtonEvent, PairedRemoteResponse
from pytest_unordered import unordered
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.bang_olufsen.const import (
@@ -20,37 +21,64 @@ from .const import (
TEST_BUTTON_EVENT_ENTITY_ID,
TEST_REMOTE_KEY_EVENT_ENTITY_ID,
TEST_SERIAL_NUMBER_3,
TEST_SERIAL_NUMBER_4,
)
from .util import (
get_a5_entity_ids,
get_balance_entity_ids,
get_core_entity_ids,
get_premiere_entity_ids,
get_remote_entity_ids,
)
from .util import get_button_entity_ids, get_remote_entity_ids
from tests.common import MockConfigEntry
async def test_button_event_creation_balance(
async def _check_button_event_creation(
hass: HomeAssistant,
integration: None,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
config_entry: MockConfigEntry,
client: AsyncMock,
entity_ids: list[str],
) -> None:
"""Test button event entities are created when using a Balance (Most devices support all buttons like the Balance)."""
# Add Button Event entity ids
entity_ids: list[str] = [*get_button_entity_ids(), *get_remote_entity_ids()]
"""Test body for entity creation tests."""
# Load entry
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await mock_websocket_connection(hass, client)
# Check that the entities are available
for entity_id in entity_ids:
assert entity_registry.async_get(entity_id)
# Check number of entities
# The media_player entity and all of the button event entities should be the only available
# Check that no entities other than the expected have been created
entity_ids_available = list(entity_registry.entities.keys())
assert len(entity_ids_available) == 1 + len(entity_ids)
# Check snapshot
assert entity_ids_available == unordered(entity_ids)
assert entity_ids_available == snapshot
async def test_no_button_and_remote_key_event_creation(
async def test_button_event_creation_balance(
hass: HomeAssistant,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
mock_mozart_client: AsyncMock,
) -> None:
"""Test button event entities are created when using a Balance (Most devices support all buttons like the Balance)."""
await _check_button_event_creation(
hass,
entity_registry,
snapshot,
mock_config_entry,
mock_mozart_client,
[*get_balance_entity_ids(), *get_remote_entity_ids()],
)
async def test_no_button_and_remote_key_event_creation_core(
hass: HomeAssistant,
mock_config_entry_core: MockConfigEntry,
mock_mozart_client: AsyncMock,
@@ -62,51 +90,58 @@ async def test_no_button_and_remote_key_event_creation(
items=[]
)
# Load entry
mock_config_entry_core.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry_core.entry_id)
await mock_websocket_connection(hass, mock_mozart_client)
# Check number of entities
# The media_player entity should be the only available
entity_ids_available = list(entity_registry.entities.keys())
assert len(entity_ids_available) == 1
# Check snapshot
assert entity_ids_available == snapshot
await _check_button_event_creation(
hass,
entity_registry,
snapshot,
mock_config_entry_core,
mock_mozart_client,
get_core_entity_ids(),
)
async def test_button_event_creation_beosound_premiere(
async def test_button_event_creation_premiere(
hass: HomeAssistant,
mock_config_entry_premiere: MockConfigEntry,
mock_mozart_client: AsyncMock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test Bluetooth button event entity is not created when using a Beosound Premiere."""
"""Test Bluetooth and Microphone button event entities are not created when using a Beosound Premiere."""
# Load entry
mock_config_entry_premiere.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry_premiere.entry_id)
await mock_websocket_connection(hass, mock_mozart_client)
await _check_button_event_creation(
hass,
entity_registry,
snapshot,
mock_config_entry_premiere,
mock_mozart_client,
[
*get_premiere_entity_ids(),
*get_remote_entity_ids(device_serial=TEST_SERIAL_NUMBER_3),
],
)
# Add Button Event entity ids
entity_ids = [
*get_button_entity_ids("beosound_premiere_33333333"),
*get_remote_entity_ids(device_serial=TEST_SERIAL_NUMBER_3),
]
entity_ids.remove("event.beosound_premiere_33333333_bluetooth")
# Check that the entities are available
for entity_id in entity_ids:
assert entity_registry.async_get(entity_id)
async def test_button_event_creation_a5(
hass: HomeAssistant,
mock_config_entry_a5: MockConfigEntry,
mock_mozart_client: AsyncMock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test Microphone button event entity is not created when using a Beosound A5."""
# Check number of entities
# The media_player entity and all of the button event entities (except Bluetooth) should be the only available
entity_ids_available = list(entity_registry.entities.keys())
assert len(entity_ids_available) == 1 + len(entity_ids)
assert entity_ids_available == snapshot
await _check_button_event_creation(
hass,
entity_registry,
snapshot,
mock_config_entry_a5,
mock_mozart_client,
[
*get_a5_entity_ids(),
*get_remote_entity_ids(device_serial=TEST_SERIAL_NUMBER_4),
],
)
async def test_button(


@@ -10,6 +10,7 @@ from mozart_api.models import (
WebsocketNotificationTag,
)
import pytest
from pytest_unordered import unordered
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.bang_olufsen.const import (
@@ -29,7 +30,7 @@ from .const import (
TEST_REMOTE_SERIAL_PAIRED,
TEST_SERIAL_NUMBER,
)
from .util import get_button_entity_ids, get_remote_entity_ids
from .util import get_balance_entity_ids, get_remote_entity_ids
from tests.common import MockConfigEntry
@@ -133,9 +134,8 @@ async def test_on_remote_control_already_added(
assert device_registry.async_get_device({(DOMAIN, TEST_REMOTE_SERIAL_PAIRED)})
# Check number of entities (remote and button events and media_player)
assert (
len(list(entity_registry.entities.keys()))
== len(get_remote_entity_ids()) + len(get_button_entity_ids()) + 1
assert list(entity_registry.entities.keys()) == unordered(
[*get_balance_entity_ids(), *get_remote_entity_ids()]
)
remote_callback = mock_mozart_client.get_notification_notifications.call_args[0][0]
@@ -152,12 +152,11 @@ async def test_on_remote_control_already_added(
assert mock_mozart_client.get_bluetooth_remotes.call_count == 2
assert device_registry.async_get_device({(DOMAIN, TEST_REMOTE_SERIAL_PAIRED)})
# Check number of entities
# Check number of entities (remote and button events and media_player)
entity_ids_available = list(entity_registry.entities.keys())
assert (
len(entity_ids_available)
== len(get_remote_entity_ids()) + len(get_button_entity_ids()) + 1
assert list(entity_registry.entities.keys()) == unordered(
[*get_balance_entity_ids(), *get_remote_entity_ids()]
)
assert entity_ids_available == snapshot
@@ -180,10 +179,9 @@ async def test_on_remote_control_paired(
assert mock_mozart_client.get_bluetooth_remotes.call_count == 1
assert device_registry.async_get_device({(DOMAIN, TEST_REMOTE_SERIAL_PAIRED)})
# Check number of entities (button events and media_player)
assert (
len(list(entity_registry.entities.keys()))
== len(get_remote_entity_ids()) + len(get_button_entity_ids()) + 1
# Check number of entities (button and remote events and media_player)
assert list(entity_registry.entities.keys()) == unordered(
[*get_balance_entity_ids(), *get_remote_entity_ids()]
)
# "Pair" a new remote
mock_mozart_client.get_bluetooth_remotes.return_value = PairedRemoteResponse(
@@ -234,12 +232,12 @@ async def test_on_remote_control_paired(
# Check number of entities (remote and button events and media_player)
entity_ids_available = list(entity_registry.entities.keys())
assert (
len(entity_ids_available)
== len(get_remote_entity_ids())
+ len(get_remote_entity_ids())
+ len(get_button_entity_ids())
+ 1
assert entity_ids_available == unordered(
[
*get_balance_entity_ids(),
*get_remote_entity_ids(),
*get_remote_entity_ids("66666666"),
]
)
assert entity_ids_available == snapshot
@@ -262,11 +260,11 @@ async def test_on_remote_control_unpaired(
assert mock_mozart_client.get_bluetooth_remotes.call_count == 1
assert device_registry.async_get_device({(DOMAIN, TEST_REMOTE_SERIAL_PAIRED)})
# Check number of entities (button events and media_player)
assert (
len(list(entity_registry.entities.keys()))
== len(get_remote_entity_ids()) + len(get_button_entity_ids()) + 1
# Check number of entities (button and remote events and media_player)
assert list(entity_registry.entities.keys()) == unordered(
[*get_balance_entity_ids(), *get_remote_entity_ids()]
)
# "Unpair" the remote
mock_mozart_client.get_bluetooth_remotes.return_value = PairedRemoteResponse(
items=[]
@@ -296,7 +294,7 @@ async def test_on_remote_control_unpaired(
# Check number of entities (button events and media_player)
entity_ids_available = list(entity_registry.entities.keys())
assert len(entity_ids_available) == +len(get_button_entity_ids()) + 1
assert entity_ids_available == unordered(get_balance_entity_ids())
assert entity_ids_available == snapshot


@@ -10,17 +10,58 @@ from homeassistant.components.bang_olufsen.const import (
DEVICE_BUTTONS,
)
from .const import TEST_REMOTE_SERIAL, TEST_SERIAL_NUMBER
from .const import (
TEST_MEDIA_PLAYER_ENTITY_ID,
TEST_MEDIA_PLAYER_ENTITY_ID_2,
TEST_MEDIA_PLAYER_ENTITY_ID_3,
TEST_MEDIA_PLAYER_ENTITY_ID_4,
TEST_REMOTE_SERIAL,
TEST_SERIAL_NUMBER,
)
def get_button_entity_ids(id_prefix: str = "beosound_balance_11111111") -> list[str]:
"""Return a list of button entity_ids that Mozart devices (except Beoconnect Core and Beosound Premiere) provides."""
def _get_button_entity_ids(id_prefix: str = "beosound_balance_11111111") -> list[str]:
"""Return a list of button entity_ids that Mozart devices provide.
Beoconnect Core, Beosound A5, Beosound A9 and Beosound Premiere do not have (all of the) physical buttons and need filtering.
"""
return [
f"event.{id_prefix}_{underscore(button_type)}".replace("preset", "favorite_")
for button_type in DEVICE_BUTTONS
]
def get_balance_entity_ids() -> list[str]:
"""Return a list of entity_ids that a Beosound Balance provides."""
return [TEST_MEDIA_PLAYER_ENTITY_ID, *_get_button_entity_ids()]
def get_premiere_entity_ids() -> list[str]:
"""Return a list of entity_ids that a Beosound Premiere provides."""
buttons = [
TEST_MEDIA_PLAYER_ENTITY_ID_3,
*_get_button_entity_ids("beosound_premiere_33333333"),
]
buttons.remove("event.beosound_premiere_33333333_bluetooth")
buttons.remove("event.beosound_premiere_33333333_microphone")
return buttons
def get_a5_entity_ids() -> list[str]:
"""Return a list of entity_ids that a Beosound A5 provides."""
buttons = [
TEST_MEDIA_PLAYER_ENTITY_ID_4,
*_get_button_entity_ids("beosound_a5_44444444"),
]
buttons.remove("event.beosound_a5_44444444_microphone")
return buttons
def get_core_entity_ids() -> list[str]:
"""Return a list of entity_ids that a Beoconnect core provides."""
return [TEST_MEDIA_PLAYER_ENTITY_ID_2]
def get_remote_entity_ids(
remote_serial: str = TEST_REMOTE_SERIAL, device_serial: str = TEST_SERIAL_NUMBER
) -> list[str]:


@@ -82,6 +82,10 @@ def patch_doorbird_api_entry_points(api: MagicMock) -> Generator[DoorBird]:
"homeassistant.components.doorbird.config_flow.DoorBird",
return_value=api,
),
patch(
"homeassistant.components.doorbird.device.get_url",
return_value="http://127.0.0.1:8123",
),
):
yield api


@@ -2,15 +2,141 @@
from copy import deepcopy
from http import HTTPStatus
from typing import Any
from doorbirdpy import DoorBirdScheduleEntry
import pytest
from homeassistant.components.doorbird.const import CONF_EVENTS
from homeassistant.components.doorbird.const import (
CONF_EVENTS,
DEFAULT_DOORBELL_EVENT,
DEFAULT_MOTION_EVENT,
DOMAIN,
)
from homeassistant.core import HomeAssistant
from . import VALID_CONFIG
from .conftest import DoorbirdMockerType
from tests.common import MockConfigEntry
@pytest.fixture
def doorbird_favorites_with_stale() -> dict[str, dict[str, Any]]:
"""Return favorites fixture with stale favorites from another HA instance.
Creates favorites where identifier "2" has the same event name as "0"
(mydoorbird_doorbell) but points to a different HA instance URL.
These stale favorites should be filtered out.
"""
return {
"http": {
"0": {
"title": "Home Assistant (mydoorbird_doorbell)",
"value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_doorbell?token=test-token",
},
# Stale favorite from a different HA instance - should be filtered out
"2": {
"title": "Home Assistant (mydoorbird_doorbell)",
"value": "http://old-ha-instance:8123/api/doorbird/mydoorbird_doorbell?token=old-token",
},
"5": {
"title": "Home Assistant (mydoorbird_motion)",
"value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=test-token",
},
}
}
@pytest.fixture
def doorbird_schedule_with_stale() -> list[DoorBirdScheduleEntry]:
"""Return schedule fixture with outputs referencing stale favorites.
Both param "0" and "2" map to doorbell input, but "2" is a stale favorite.
"""
schedule_data = [
{
"input": "doorbell",
"param": "1",
"output": [
{
"event": "http",
"param": "0",
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
},
{
"event": "http",
"param": "2",
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
},
],
},
{
"input": "motion",
"param": "",
"output": [
{
"event": "http",
"param": "5",
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
},
],
},
]
return DoorBirdScheduleEntry.parse_all(schedule_data)
async def test_stale_favorites_filtered_by_url(
hass: HomeAssistant,
doorbird_mocker: DoorbirdMockerType,
doorbird_favorites_with_stale: dict[str, dict[str, Any]],
doorbird_schedule_with_stale: list[DoorBirdScheduleEntry],
) -> None:
"""Test that stale favorites from other HA instances are filtered out."""
await doorbird_mocker(
favorites=doorbird_favorites_with_stale,
schedule=doorbird_schedule_with_stale,
)
# Should have 2 event entities - stale favorite "2" is filtered out
# because its URL doesn't match the current HA instance
event_entities = hass.states.async_all("event")
assert len(event_entities) == 2
async def test_custom_url_used_for_favorites(
hass: HomeAssistant,
doorbird_mocker: DoorbirdMockerType,
) -> None:
"""Test that custom URL override is used instead of get_url."""
custom_url = "https://my-custom-url.example.com:8443"
favorites = {
"http": {
"1": {
"title": "Home Assistant (mydoorbird_doorbell)",
"value": f"{custom_url}/api/doorbird/mydoorbird_doorbell?token=test-token",
},
"2": {
"title": "Home Assistant (mydoorbird_motion)",
"value": f"{custom_url}/api/doorbird/mydoorbird_motion?token=test-token",
},
}
}
config_with_custom_url = {
**VALID_CONFIG,
"hass_url_override": custom_url,
}
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="1CCAE3AAAAAA",
data=config_with_custom_url,
options={CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]},
)
await doorbird_mocker(entry=entry, favorites=favorites)
# Should have 2 event entities using the custom URL
event_entities = hass.states.async_all("event")
assert len(event_entities) == 2
async def test_no_configured_events(
hass: HomeAssistant,


@@ -43,7 +43,7 @@ def mock_update_duckdns() -> Generator[AsyncMock]:
"""Mock _update_duckdns."""
with patch(
"homeassistant.components.duckdns.config_flow._update_duckdns",
"homeassistant.components.duckdns.config_flow.update_duckdns",
return_value=True,
) as mock:
yield mock


@@ -5,15 +5,9 @@ import logging
import pytest
from homeassistant.components.duckdns import (
ATTR_TXT,
BACKOFF_INTERVALS,
DOMAIN,
INTERVAL,
SERVICE_SET_TXT,
UPDATE_URL,
async_track_time_interval_backoff,
)
from homeassistant.components.duckdns import ATTR_TXT, DOMAIN, SERVICE_SET_TXT
from homeassistant.components.duckdns.coordinator import BACKOFF_INTERVALS
from homeassistant.components.duckdns.helpers import UPDATE_URL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.util.dt import utcnow
@@ -73,12 +67,13 @@ async def test_setup(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -
assert aioclient_mock.call_count == 2
@pytest.mark.freeze_time
async def test_setup_backoff(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
config_entry: MockConfigEntry,
) -> None:
"""Test setup fails if first update fails."""
"""Test update fails with backoffs and recovers."""
aioclient_mock.get(
UPDATE_URL,
params={"domains": TEST_SUBDOMAIN, "token": TEST_TOKEN},
@@ -86,10 +81,10 @@ async def test_setup_backoff(
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
assert config_entry.state is ConfigEntryState.SETUP_RETRY
assert aioclient_mock.call_count == 1
tme = utcnow()
@@ -103,6 +98,17 @@ async def test_setup_backoff(
assert aioclient_mock.call_count == idx + 1
aioclient_mock.clear_requests()
aioclient_mock.get(
UPDATE_URL,
params={"domains": TEST_SUBDOMAIN, "token": TEST_TOKEN},
text="OK",
)
async_fire_time_changed(hass, tme)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
@pytest.mark.usefixtures("setup_duckdns")
async def test_service_set_txt(
@@ -147,67 +153,6 @@ async def test_service_clear_txt(
assert aioclient_mock.call_count == 1
async def test_async_track_time_interval_backoff(hass: HomeAssistant) -> None:
"""Test setup fails if first update fails."""
ret_val = False
call_count = 0
tme = None
async def _return(now):
nonlocal call_count, ret_val, tme
if tme is None:
tme = now
call_count += 1
return ret_val
intervals = (
INTERVAL,
INTERVAL * 2,
INTERVAL * 5,
INTERVAL * 9,
INTERVAL * 10,
INTERVAL * 11,
INTERVAL * 12,
)
async_track_time_interval_backoff(hass, _return, intervals)
await hass.async_block_till_done()
assert call_count == 1
_LOGGER.debug("Backoff")
for idx in range(1, len(intervals)):
tme += intervals[idx]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == idx + 1
_LOGGER.debug("Max backoff reached - intervals[-1]")
for _idx in range(1, 10):
tme += intervals[-1]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == idx + 1 + _idx
_LOGGER.debug("Reset backoff")
call_count = 0
ret_val = True
tme += intervals[-1]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == 1
_LOGGER.debug("No backoff - intervals[0]")
for _idx in range(2, 10):
tme += intervals[0]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == _idx
async def test_load_unload(
hass: HomeAssistant,
config_entry: MockConfigEntry,


@@ -59,6 +59,7 @@ MOCK_TRACKER = Tracker(
not_charging=True,
overall=True,
),
icon="http://res.cloudinary.com/iot-venture/image/upload/v1717594357/kyaqq7nfitrdvaoakb8s.jpg",
)


@@ -37,6 +37,7 @@
# name: test_state_entity_device_snapshots[device_tracker.fluffy-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'http://res.cloudinary.com/iot-venture/image/upload/v1717594357/kyaqq7nfitrdvaoakb8s.jpg',
'friendly_name': 'Fluffy',
'gps_accuracy': 10.0,
'latitude': 52.520008,


@@ -0,0 +1,26 @@
{
"version": 2,
"minor_version": 2,
"key": "knx/config_store.json",
"data": {
"entities": {
"sensor": {
"knx_es_01KC2F5CP5S4QCE3FZ49EF7CSJ": {
"entity": {
"name": "Test",
"entity_category": null,
"device_info": null
},
"knx": {
"ga_sensor": {
"state": "1/1/1",
"dpt": "7.600",
"passive": []
},
"sync_state": true
}
}
}
}
}
}


@@ -1442,6 +1442,338 @@
'type': 'result',
})
# ---
# name: test_knx_get_schema[sensor]
dict({
'id': 1,
'result': list([
dict({
'name': 'ga_sensor',
'options': dict({
'dptClasses': list([
'numeric',
'string',
]),
'passive': True,
'state': dict({
'required': True,
}),
'write': False,
}),
'required': True,
'type': 'knx_group_address',
}),
dict({
'collapsible': True,
'name': 'section_advanced_options',
'required': False,
'type': 'knx_section_flat',
}),
dict({
'name': 'unit_of_measurement',
'optional': True,
'required': False,
'selector': dict({
'select': dict({
'custom_value': True,
'mode': 'dropdown',
'multiple': False,
'options': list([
'%',
'A',
'B',
'B/s',
'BTU/(h⋅ft²)',
'Beaufort',
'CCF',
'EB',
'EiB',
'GB',
'GB/s',
'GHz',
'GJ',
'GW',
'GWh',
'Gbit',
'Gbit/s',
'Gcal',
'GiB',
'GiB/s',
'Hz',
'J',
'K',
'KiB',
'KiB/s',
'L',
'L/h',
'L/min',
'L/s',
'MB',
'MB/s',
'MCF',
'MHz',
'MJ',
'MV',
'MW',
'MWh',
'Mbit',
'Mbit/s',
'Mcal',
'MiB',
'MiB/s',
'PB',
'Pa',
'PiB',
'S/cm',
'TB',
'TW',
'TWh',
'TiB',
'V',
'VA',
'W',
'W/m²',
'Wh',
'Wh/km',
'YB',
'YiB',
'ZB',
'ZiB',
'ac',
'bar',
'bit',
'bit/s',
'cal',
'cbar',
'cm',
'cm²',
'd',
'dB',
'dBA',
'dBm',
'fl. oz.',
'ft',
'ft/s',
'ft²',
'ft³',
'ft³/min',
'g',
'g/m³',
'gal',
'gal/d',
'gal/h',
'gal/min',
'h',
'hPa',
'ha',
'in',
'in/d',
'in/h',
'in/s',
'inHg',
'inH₂O',
'in²',
'kB',
'kB/s',
'kHz',
'kJ',
'kPa',
'kV',
'kVA',
'kW',
'kWh',
'kWh/100km',
'kbit',
'kbit/s',
'kcal',
'kg',
'km',
'km/h',
'km/kWh',
'km²',
'kn',
'kvar',
'kvarh',
'lb',
'lx',
'm',
'm/min',
'm/s',
'mA',
'mL',
'mL/s',
'mPa',
'mS/cm',
'mV',
'mVA',
'mW',
'mWh',
'mbar',
'mg',
'mg/dL',
'mg/m³',
'mi',
'mi/kWh',
'min',
'mi²',
'mm',
'mm/d',
'mm/h',
'mm/s',
'mmHg',
'mmol/L',
'mm²',
'mph',
'ms',
'mvar',
'm²',
'm³',
'm³/h',
'm³/min',
'm³/s',
'nmi',
'oz',
'ppb',
'ppm',
'psi',
's',
'st',
'var',
'varh',
'yd',
'yd²',
'°',
'°C',
'°F',
'μS/cm',
'μV',
'μg',
'μg/m³',
'μs',
]),
'sort': False,
'translation_key': 'component.knx.selector.sensor_unit_of_measurement',
}),
}),
'type': 'ha_selector',
}),
dict({
'name': 'device_class',
'optional': True,
'required': False,
'selector': dict({
'select': dict({
'custom_value': False,
'multiple': False,
'options': list([
'date',
'timestamp',
'absolute_humidity',
'apparent_power',
'aqi',
'area',
'atmospheric_pressure',
'battery',
'blood_glucose_concentration',
'carbon_monoxide',
'carbon_dioxide',
'conductivity',
'current',
'data_rate',
'data_size',
'distance',
'duration',
'energy',
'energy_distance',
'energy_storage',
'frequency',
'gas',
'humidity',
'illuminance',
'irradiance',
'moisture',
'monetary',
'nitrogen_dioxide',
'nitrogen_monoxide',
'nitrous_oxide',
'ozone',
'ph',
'pm1',
'pm10',
'pm25',
'pm4',
'power_factor',
'power',
'precipitation',
'precipitation_intensity',
'pressure',
'reactive_energy',
'reactive_power',
'signal_strength',
'sound_pressure',
'speed',
'sulphur_dioxide',
'temperature',
'temperature_delta',
'volatile_organic_compounds',
'volatile_organic_compounds_parts',
'voltage',
'volume',
'volume_storage',
'volume_flow_rate',
'water',
'weight',
'wind_direction',
'wind_speed',
]),
'sort': True,
'translation_key': 'component.knx.selector.sensor_device_class',
}),
}),
'type': 'ha_selector',
}),
dict({
'name': 'state_class',
'optional': True,
'required': False,
'selector': dict({
'select': dict({
'custom_value': False,
'mode': 'dropdown',
'multiple': False,
'options': list([
'measurement',
'measurement_angle',
'total',
'total_increasing',
]),
'sort': False,
'translation_key': 'component.knx.selector.sensor_state_class',
}),
}),
'type': 'ha_selector',
}),
dict({
'name': 'always_callback',
'optional': True,
'required': False,
'selector': dict({
'boolean': dict({
}),
}),
'type': 'ha_selector',
}),
dict({
'allow_false': True,
'default': True,
'name': 'sync_state',
'required': True,
'type': 'knx_sync_state',
}),
]),
'success': True,
'type': 'result',
})
# ---
# name: test_knx_get_schema[switch]
dict({
'id': 1,


@@ -1,6 +1,9 @@
"""Test KNX sensor."""
from typing import Any
from freezegun.api import FrozenDateTimeFactory
import pytest
from homeassistant.components.knx.const import (
ATTR_SOURCE,
@@ -8,9 +11,10 @@ from homeassistant.components.knx.const import (
CONF_SYNC_STATE,
)
from homeassistant.components.knx.schema import SensorSchema
from homeassistant.const import CONF_NAME, CONF_TYPE, STATE_UNKNOWN
from homeassistant.const import CONF_NAME, CONF_TYPE, STATE_UNKNOWN, Platform
from homeassistant.core import HomeAssistant, State
from . import KnxEntityGenerator
from .conftest import KNXTestKit
from tests.common import (
@@ -166,3 +170,135 @@ async def test_always_callback(hass: HomeAssistant, knx: KNXTestKit) -> None:
await knx.receive_write("1/1/1", (0xFA,))
await knx.receive_write("2/2/2", (0xFA,))
assert len(events) == 6
@pytest.mark.parametrize(
("knx_config", "response_payload", "expected_state"),
[
(
{
"ga_sensor": {
"state": "1/1/1",
"passive": [],
"dpt": "9.001", # temperature 2 byte float
},
},
(0, 0),
{
"state": "0.0",
"device_class": "temperature",
"state_class": "measurement",
"unit_of_measurement": "°C",
},
),
(
{
"ga_sensor": {
"state": "1/1/1",
"passive": [],
"dpt": "12", # generic 4byte uint
},
"state_class": "total_increasing",
"device_class": "energy",
"unit_of_measurement": "Mcal",
"sync_state": True,
},
(1, 2, 3, 4),
{
"state": "16909060",
"device_class": "energy",
"state_class": "total_increasing",
},
),
],
)
async def test_sensor_ui_create(
hass: HomeAssistant,
knx: KNXTestKit,
create_ui_entity: KnxEntityGenerator,
knx_config: dict[str, Any],
response_payload: tuple[int, ...],
expected_state: dict[str, Any],
) -> None:
"""Test creating a sensor."""
await knx.setup_integration()
await create_ui_entity(
platform=Platform.SENSOR,
entity_data={"name": "test"},
knx_data=knx_config,
)
# created entity sends read-request to KNX bus
await knx.assert_read("1/1/1")
await knx.receive_response("1/1/1", response_payload)
knx.assert_state("sensor.test", **expected_state)
async def test_sensor_ui_load(knx: KNXTestKit) -> None:
"""Test loading a sensor from storage."""
await knx.setup_integration(config_store_fixture="config_store_sensor.json")
await knx.assert_read("1/1/1", response=(0, 0), ignore_order=True)
knx.assert_state(
"sensor.test",
"0",
device_class=None, # 7.600 color temperature has no sensor device class
state_class="measurement",
unit_of_measurement="K",
)
@pytest.mark.parametrize(
"knx_config",
[
(
{
"ga_sensor": {
"state": "1/1/1",
"passive": [],
"dpt": "9.001", # temperature 2 byte float
},
"state_class": "totoal_increasing", # invalid for temperature
}
),
(
{
"ga_sensor": {
"state": "1/1/1",
"passive": [],
"dpt": "12", # generic 4byte uint
},
"state_class": "total_increasing",
"device_class": "energy", # requires unit_of_measurement
"sync_state": True,
}
),
(
{
"ga_sensor": {
"state": "1/1/1",
"passive": [],
"dpt": "9.001", # temperature 2 byte float
},
"state_class": "measurement_angle", # requires degree unit
"sync_state": True,
}
),
],
)
async def test_sensor_ui_create_attribute_validation(
hass: HomeAssistant,
knx: KNXTestKit,
create_ui_entity: KnxEntityGenerator,
knx_config: dict[str, Any],
) -> None:
"""Test creating a sensor with invalid unit, state_class or device_class."""
await knx.setup_integration()
with pytest.raises(AssertionError) as err:
await create_ui_entity(
platform=Platform.SENSOR,
entity_data={"name": "test"},
knx_data=knx_config,
)
assert "success" in err.value.args[0]
assert "error_base" in err.value.args[0]
assert "path" in err.value.args[0]


@@ -695,3 +695,199 @@ async def test_websocket_backup_timeout_handling(
assert not msg["success"]
assert msg["error"]["code"] == "unknown_error"
async def test_websocket_subscribe_feature(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test subscribing to a specific preview feature."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
msg = await client.receive_json()
assert msg["success"]
assert msg["result"] is None
# Initial state is sent as event
event_msg = await client.receive_json()
assert event_msg["type"] == "event"
assert event_msg["event"] == {
"preview_feature": "special_repair",
"domain": "kitchen_sink",
"enabled": False,
"is_built_in": True,
"feedback_url": ANY,
"learn_more_url": ANY,
"report_issue_url": ANY,
}
async def test_websocket_subscribe_feature_receives_updates(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test that subscription receives updates when feature is toggled."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
subscribe_msg = await client.receive_json()
assert subscribe_msg["success"]
subscription_id = subscribe_msg["id"]
# Initial state event
initial_event_msg = await client.receive_json()
assert initial_event_msg["id"] == subscription_id
assert initial_event_msg["type"] == "event"
assert initial_event_msg["event"]["enabled"] is False
await client.send_json_auto_id(
{
"type": "labs/update",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
"enabled": True,
}
)
# Update event arrives before the update result
event_msg = await client.receive_json()
assert event_msg["id"] == subscription_id
assert event_msg["type"] == "event"
assert event_msg["event"] == {
"preview_feature": "special_repair",
"domain": "kitchen_sink",
"enabled": True,
"is_built_in": True,
"feedback_url": ANY,
"learn_more_url": ANY,
"report_issue_url": ANY,
}
update_msg = await client.receive_json()
assert update_msg["success"]
async def test_websocket_subscribe_nonexistent_feature(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test subscribing to a preview feature that doesn't exist."""
assert await async_setup(hass, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "nonexistent",
"preview_feature": "feature",
}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "not_found"
assert "not found" in msg["error"]["message"].lower()
async def test_websocket_subscribe_does_not_require_admin(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_admin_user: MockUser,
) -> None:
"""Test that subscribe does not require admin privileges."""
hass_admin_user.groups = []
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
msg = await client.receive_json()
assert msg["success"]
# Consume initial state event
await client.receive_json()
async def test_websocket_subscribe_only_receives_subscribed_feature_updates(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test that subscription only receives updates for the subscribed feature."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
subscribe_msg = await client.receive_json()
assert subscribe_msg["success"]
# Consume initial state event
await client.receive_json()
# Fire an event for a different feature
hass.bus.async_fire(
EVENT_LABS_UPDATED,
{"domain": "other_domain", "preview_feature": "other_feature", "enabled": True},
)
await hass.async_block_till_done()
await client.send_json_auto_id(
{
"type": "labs/update",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
"enabled": True,
}
)
# Event message arrives before the update result
# Should only receive event for subscribed feature, not the other one
event_msg = await client.receive_json()
assert event_msg["type"] == "event"
assert event_msg["event"]["domain"] == "kitchen_sink"
assert event_msg["event"]["preview_feature"] == "special_repair"
update_msg = await client.receive_json()
assert update_msg["success"]


@@ -10,7 +10,6 @@ import pytest
from homeassistant.components import frontend
from homeassistant.components.lovelace import const, dashboard
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.setup import async_setup_component
from tests.common import assert_setup_component, async_capture_events
@@ -30,236 +29,111 @@ def mock_onboarding_done() -> Generator[MagicMock]:
yield mock_onboarding
async def test_lovelace_from_storage_new_installation(
async def test_lovelace_from_storage(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_storage: dict[str, Any],
) -> None:
"""Test new installation has default lovelace panel but no dashboard entry."""
"""Test we load lovelace config from storage."""
assert await async_setup_component(hass, "lovelace", {})
# Default lovelace panel is registered for frontend availability
assert "lovelace" in hass.data[frontend.DATA_PANELS]
assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "storage"}
client = await hass_ws_client(hass)
# Dashboards list should be empty (no dashboard entry created)
await client.send_json({"id": 5, "type": "lovelace/dashboards/list"})
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/config"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
async def test_lovelace_from_storage_migration(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_storage: dict[str, Any],
) -> None:
"""Test we migrate existing lovelace config from storage to dashboard."""
# Pre-populate storage with existing lovelace config
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY_DEFAULT,
"data": {"config": {"views": [{"title": "Home"}]}},
}
assert await async_setup_component(hass, "lovelace", {})
# After migration, lovelace panel should be registered as a dashboard
assert "lovelace" in hass.data[frontend.DATA_PANELS]
assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "storage"}
client = await hass_ws_client(hass)
# Dashboard should be in the list
await client.send_json({"id": 5, "type": "lovelace/dashboards/list"})
response = await client.receive_json()
assert response["success"]
assert len(response["result"]) == 1
assert response["result"][0]["url_path"] == "lovelace"
assert response["result"][0]["title"] == "Overview"
# Fetch migrated config
await client.send_json({"id": 6, "type": "lovelace/config", "url_path": "lovelace"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == {"views": [{"title": "Home"}]}
# Old storage key should be gone, new one should exist
assert dashboard.CONFIG_STORAGE_KEY_DEFAULT not in hass_storage
assert dashboard.CONFIG_STORAGE_KEY.format("lovelace") in hass_storage
assert not response["success"]
assert response["error"]["code"] == "config_not_found"
# Store new config
events = async_capture_events(hass, const.EVENT_LOVELACE_UPDATED)
await client.send_json(
{
"id": 7,
"type": "lovelace/config/save",
"url_path": "lovelace",
"config": {"yo": "hello"},
}
{"id": 6, "type": "lovelace/config/save", "config": {"yo": "hello"}}
)
response = await client.receive_json()
assert response["success"]
assert hass_storage[dashboard.CONFIG_STORAGE_KEY.format("lovelace")]["data"] == {
assert hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT]["data"] == {
"config": {"yo": "hello"}
}
assert len(events) == 1
# Load new config
await client.send_json({"id": 8, "type": "lovelace/config", "url_path": "lovelace"})
await client.send_json({"id": 7, "type": "lovelace/config"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == {"yo": "hello"}
# Test with recovery mode
hass.config.recovery_mode = True
await client.send_json({"id": 9, "type": "lovelace/config", "url_path": "lovelace"})
await client.send_json({"id": 8, "type": "lovelace/config"})
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == "config_not_found"
await client.send_json(
{
"id": 10,
"type": "lovelace/config/save",
"url_path": "lovelace",
"config": {"yo": "hello"},
}
{"id": 9, "type": "lovelace/config/save", "config": {"yo": "hello"}}
)
response = await client.receive_json()
assert not response["success"]
await client.send_json(
{"id": 11, "type": "lovelace/config/delete", "url_path": "lovelace"}
)
await client.send_json({"id": 10, "type": "lovelace/config/delete"})
response = await client.receive_json()
assert not response["success"]
async def test_lovelace_dashboard_deleted_re_registers_panel(
async def test_lovelace_from_storage_save_before_load(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_storage: dict[str, Any],
) -> None:
"""Test deleting the lovelace dashboard re-registers the default panel."""
# Pre-populate storage with existing lovelace config (triggers migration)
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY_DEFAULT,
"data": {"config": {"views": [{"title": "Home"}]}},
}
"""Test we can load lovelace config from storage."""
assert await async_setup_component(hass, "lovelace", {})
# After migration, lovelace panel should be registered as a dashboard
assert "lovelace" in hass.data[frontend.DATA_PANELS]
client = await hass_ws_client(hass)
# Dashboard should be in the list
await client.send_json({"id": 5, "type": "lovelace/dashboards/list"})
response = await client.receive_json()
assert response["success"]
assert len(response["result"]) == 1
dashboard_id = response["result"][0]["id"]
# Delete the lovelace dashboard
# Store new config
await client.send_json(
{"id": 6, "type": "lovelace/dashboards/delete", "dashboard_id": dashboard_id}
{"id": 6, "type": "lovelace/config/save", "config": {"yo": "hello"}}
)
response = await client.receive_json()
assert response["success"]
# Dashboard should be gone from the list
await client.send_json({"id": 7, "type": "lovelace/dashboards/list"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
# But the lovelace panel should still be registered (re-registered as default)
assert "lovelace" in hass.data[frontend.DATA_PANELS]
assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "storage"}
assert hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT]["data"] == {
"config": {"yo": "hello"}
}
async def test_lovelace_migration_skipped_when_both_files_exist(
async def test_lovelace_from_storage_delete(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_storage: dict[str, Any],
) -> None:
"""Test migration is skipped when both old and new storage files exist."""
# Pre-populate both old and new storage (simulating incomplete migration)
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY_DEFAULT,
"data": {"config": {"views": [{"title": "Old"}]}},
}
hass_storage[dashboard.CONFIG_STORAGE_KEY.format("lovelace")] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY.format("lovelace"),
"data": {"config": {"views": [{"title": "New"}]}},
}
"""Test we delete lovelace config from storage."""
assert await async_setup_component(hass, "lovelace", {})
# No dashboard should be created (migration skipped)
client = await hass_ws_client(hass)
await client.send_json({"id": 5, "type": "lovelace/dashboards/list"})
# Store new config
await client.send_json(
{"id": 6, "type": "lovelace/config/save", "config": {"yo": "hello"}}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
async def test_lovelace_migration_skipped_when_already_migrated(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_storage: dict[str, Any],
) -> None:
"""Test migration is skipped when dashboard already exists."""
# Pre-populate dashboards with existing lovelace dashboard
hass_storage[dashboard.DASHBOARDS_STORAGE_KEY] = {
"version": 1,
"key": dashboard.DASHBOARDS_STORAGE_KEY,
"data": {
"items": [
{
"id": "lovelace",
"url_path": "lovelace",
"title": "Overview",
"icon": "mdi:view-dashboard",
"show_in_sidebar": True,
"require_admin": False,
"mode": "storage",
}
]
},
}
hass_storage[dashboard.CONFIG_STORAGE_KEY.format("lovelace")] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY.format("lovelace"),
"data": {"config": {"views": [{"title": "Home"}]}},
}
# Also have old file (should be ignored since dashboard exists)
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"version": 1,
"key": dashboard.CONFIG_STORAGE_KEY_DEFAULT,
"data": {"config": {"views": [{"title": "Old"}]}},
assert hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT]["data"] == {
"config": {"yo": "hello"}
}
assert await async_setup_component(hass, "lovelace", {})
client = await hass_ws_client(hass)
await client.send_json({"id": 5, "type": "lovelace/dashboards/list"})
# Delete config
await client.send_json({"id": 7, "type": "lovelace/config/delete"})
response = await client.receive_json()
assert response["success"]
# Only the pre-existing dashboard, no duplicate
assert len(response["result"]) == 1
assert response["result"][0]["url_path"] == "lovelace"
assert dashboard.CONFIG_STORAGE_KEY_DEFAULT not in hass_storage
# Old storage should still exist (not touched)
assert dashboard.CONFIG_STORAGE_KEY_DEFAULT in hass_storage
# Fetch data
await client.send_json({"id": 8, "type": "lovelace/config"})
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == "config_not_found"
async def test_lovelace_from_yaml(
@@ -352,24 +226,6 @@ async def test_lovelace_from_yaml(
assert len(events) == 2
async def test_lovelace_from_yaml_creates_repair_issue(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
"""Test YAML mode creates a repair issue."""
assert await async_setup_component(hass, "lovelace", {"lovelace": {"mode": "YAML"}})
# Panel should still be registered for backwards compatibility
assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "yaml"}
# Repair issue should be created with 6-month deadline
issue_registry = ir.async_get(hass)
issue = issue_registry.async_get_issue("lovelace", "yaml_mode_deprecated")
assert issue is not None
assert issue.severity == ir.IssueSeverity.WARNING
assert issue.is_fixable is False
assert issue.breaks_in_ha_version == "2026.6.0"
@pytest.mark.parametrize("url_path", ["test-panel", "test-panel-no-sidebar"])
async def test_dashboard_from_yaml(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, url_path
@@ -508,9 +364,6 @@ async def test_storage_dashboards(
) -> None:
"""Test we load lovelace config from storage."""
assert await async_setup_component(hass, "lovelace", {})
# Default lovelace panel is registered for frontend availability
assert "lovelace" in hass.data[frontend.DATA_PANELS]
assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "storage"}
client = await hass_ws_client(hass)


@@ -34,12 +34,11 @@ async def test_system_health_info_autogen(hass: HomeAssistant) -> None:
assert info == {"dashboards": 1, "mode": "auto-gen", "resources": 0}
async def test_system_health_info_storage_migration(
async def test_system_health_info_storage(
hass: HomeAssistant, hass_storage: dict[str, Any]
) -> None:
"""Test system health info endpoint after migration from old storage."""
"""Test system health info endpoint."""
assert await async_setup_component(hass, "system_health", {})
# Pre-populate old storage format (triggers migration)
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"key": "lovelace",
"version": 1,
@@ -48,8 +47,7 @@ async def test_system_health_info_storage_migration(
assert await async_setup_component(hass, "lovelace", {})
await hass.async_block_till_done()
info = await get_system_health_info(hass, "lovelace")
# After migration: default dashboard (auto-gen) + migrated "lovelace" dashboard (storage with data)
assert info == {"dashboards": 2, "mode": "storage", "resources": 0, "views": 0}
assert info == {"dashboards": 1, "mode": "storage", "resources": 0, "views": 0}
async def test_system_health_info_yaml(hass: HomeAssistant) -> None:


@@ -54,6 +54,57 @@
'state': '3.5',
})
# ---
# name: test_setup_and_update[packet_loss]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.10_10_10_10_packet_loss',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Packet loss',
'platform': 'ping',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'loss',
'unit_of_measurement': '%',
})
# ---
# name: test_setup_and_update[packet_loss].1
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '10.10.10.10 Packet loss',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.10_10_10_10_packet_loss',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0.0',
})
# ---
# name: test_setup_and_update[round_trip_time_average]
EntityRegistryEntrySnapshot({
'aliases': set({


@@ -17,6 +17,7 @@ from homeassistant.helpers import entity_registry as er
"round_trip_time_mean_deviation", # should be None in the snapshot
"round_trip_time_minimum",
"jitter",
"packet_loss",
],
)
async def test_setup_and_update(


@@ -4,8 +4,9 @@ from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, patch
from homeassistant.components.rituals_perfume_genie.const import ACCOUNT_HASH, DOMAIN
from homeassistant.components.rituals_perfume_genie.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_json_object_fixture
@@ -17,7 +18,11 @@ def mock_config_entry(unique_id: str, entry_id: str = "an_entry_id") -> MockConf
domain=DOMAIN,
title="name@example.com",
unique_id=unique_id,
data={ACCOUNT_HASH: "an_account_hash"},
data={
CONF_EMAIL: "test@rituals.com",
CONF_PASSWORD: "test-password",
},
version=2,
entry_id=entry_id,
)
@@ -90,13 +95,15 @@ async def init_integration(
"""Initialize the Rituals Perfume Genie integration with the given Config Entry and Diffuser list."""
mock_config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.rituals_perfume_genie.Account.get_devices",
return_value=mock_diffusers,
):
"homeassistant.components.rituals_perfume_genie.Account"
) as mock_account_cls:
mock_account = mock_account_cls.return_value
mock_account.authenticate = AsyncMock()
mock_account.get_devices = AsyncMock(return_value=mock_diffusers)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert mock_config_entry.state is ConfigEntryState.LOADED
assert mock_config_entry.entry_id in hass.data[DOMAIN]
assert hass.data[DOMAIN]
await hass.async_block_till_done()


@@ -0,0 +1,64 @@
"""Fixtures for Rituals Perfume Genie tests."""
from collections.abc import Generator
from unittest.mock import AsyncMock, patch
import pytest
from homeassistant.components.rituals_perfume_genie import ACCOUNT_HASH, DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from .const import TEST_EMAIL, TEST_PASSWORD
from tests.common import MockConfigEntry
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
"""Override async_setup_entry."""
with patch(
"homeassistant.components.rituals_perfume_genie.async_setup_entry",
return_value=True,
) as mock:
yield mock
@pytest.fixture
def mock_rituals_account() -> Generator[AsyncMock]:
"""Mock Rituals Account."""
with (
patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account",
autospec=True,
) as mock_account_cls,
patch(
"homeassistant.components.rituals_perfume_genie.Account",
new=mock_account_cls,
),
):
mock_account = mock_account_cls.return_value
yield mock_account
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
"""Mock Rituals Account."""
return MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_EMAIL,
data={CONF_EMAIL: TEST_EMAIL, CONF_PASSWORD: TEST_PASSWORD},
title=TEST_EMAIL,
version=2,
)
@pytest.fixture
def old_mock_config_entry() -> MockConfigEntry:
"""Mock Rituals Account."""
return MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_EMAIL,
data={ACCOUNT_HASH: "old_hash_should_be_removed"},
title=TEST_EMAIL,
version=1,
)


@@ -0,0 +1,4 @@
"""Constants for rituals_perfume_genie tests."""
TEST_EMAIL = "test@rituals.com"
TEST_PASSWORD = "test-password"

Some files were not shown because too many files have changed in this diff.