Mirror of https://github.com/home-assistant/core.git (synced 2025-12-07 16:38:07 +00:00)

Compare commits: whirlpool_... → master (40 commits)
| SHA1 |
|---|
| 4aa3f0a400 |
| 0b52c806d4 |
| bbe27d86a1 |
| fb7941df1d |
| c46e341941 |
| 2e3a9e3a90 |
| 55c5ecd28a |
| e50e2487e1 |
| 74e118f85c |
| 39a62ec2f6 |
| 1310efcb07 |
| 53af592c2c |
| 023987b805 |
| 5b8fb607b4 |
| 252f6716ff |
| bf78e28f83 |
| 22706d02a7 |
| 5cff0e946a |
| 6cbe2ed279 |
| fb0f5f52b2 |
| 5c422bb770 |
| fd1bc07b8c |
| 97a019d313 |
| 8ae8a564c2 |
| 2f72f57bb7 |
| e928e3cb54 |
| b0e2109e15 |
| b449c6673f |
| 877ad38ac3 |
| 229f45feae |
| a535d1f4eb |
| d4adc00ae6 |
| ba141f9d1d |
| 72be9793a4 |
| 5ae7cc5f84 |
| d01a469b46 |
| 9f07052874 |
| b9bc9d3fc2 |
| 1e180cd5ee |
| dc9cdd13b1 |
CODEOWNERS (generated): 4 lines changed

@@ -1354,8 +1354,8 @@ build.json @home-assistant/supervisor
/tests/components/ring/ @sdb9696
/homeassistant/components/risco/ @OnFreund
/tests/components/risco/ @OnFreund
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L @allenporter
@@ -30,6 +30,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
)

@@ -68,34 +69,19 @@ class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
account_number=user_input[CONF_ACCOUNT_NUMBER],
)
)
if isinstance(validation_response, BaseAuth):
account_number = (
user_input.get(CONF_ACCOUNT_NUMBER)
or validation_response.account_number
)
await self.async_set_unique_id(account_number)
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=account_number,
title=user_input[CONF_ACCOUNT_NUMBER],
data={
**user_input,
CONF_ACCESS_TOKEN: validation_response.refresh_token,
CONF_ACCOUNT_NUMBER: account_number,
},
)
if validation_response == "smart_meter_unavailable":
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA.extend(
{
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
),
errors={"base": validation_response},
)
errors["base"] = validation_response

return self.async_show_form(

@@ -5,6 +5,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
"iot_class": "cloud_polling",
"loggers": ["pyanglianwater"],
"quality_scale": "bronze",
"requirements": ["pyanglianwater==2.1.0"]
}
@@ -102,6 +102,12 @@ class ConfiguredDoorBird:
"""Get token for device."""
return self._token

def _get_hass_url(self) -> str:
"""Get the Home Assistant URL for this device."""
if custom_url := self.custom_url:
return custom_url
return get_url(self._hass, prefer_external=False)

async def async_register_events(self) -> None:
"""Register events on device."""
if not self.door_station_events:

@@ -146,13 +152,7 @@ class ConfiguredDoorBird:

async def _async_register_events(self) -> dict[str, Any]:
"""Register events on device."""
# Override url if another is specified in the configuration
if custom_url := self.custom_url:
hass_url = custom_url
else:
# Get the URL of this server
hass_url = get_url(self._hass, prefer_external=False)

hass_url = self._get_hass_url()
http_fav = await self._async_get_http_favorites()
if any(
# Note that a list comp is used here to ensure all

@@ -191,10 +191,14 @@ class ConfiguredDoorBird:
self._get_event_name(event): event_type
for event, event_type in DEFAULT_EVENT_TYPES
}
hass_url = self._get_hass_url()
for identifier, data in http_fav.items():
title: str | None = data.get("title")
if not title or not title.startswith("Home Assistant"):
continue
value: str | None = data.get("value")
if not value or not value.startswith(hass_url):
continue  # Not our favorite - different HA instance or stale
event = title.partition("(")[2].strip(")")
if input_type := favorite_input_type.get(identifier):
events.append(DoorbirdEvent(event, input_type))
@@ -6,5 +6,5 @@
"iot_class": "cloud_polling",
"loggers": ["evohome", "evohomeasync", "evohomeasync2"],
"quality_scale": "legacy",
"requirements": ["evohome-async==1.0.5"]
"requirements": ["evohome-async==1.0.6"]
}

@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251203.0"]
"requirements": ["home-assistant-frontend==20251203.1"]
}
@@ -149,6 +149,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
}
),
supports_response=SupportsResponse.ONLY,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)
return True

@@ -23,7 +23,7 @@ CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
RECOMMENDED_STT_MODEL = RECOMMENDED_CHAT_MODEL
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image-preview"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"

@@ -162,7 +162,7 @@
"fields": {
"filenames": {
"description": "Attachments to add to the prompt (images, PDFs, etc)",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Attachment filenames"
},
"prompt": {

@@ -159,4 +159,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
_async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)

@@ -92,7 +92,7 @@
},
"filename": {
"description": "Path to the image or video to upload.",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Filename"
}
},
@@ -7,11 +7,10 @@ in the Home Assistant Labs UI for users to enable or disable.

from __future__ import annotations

from collections.abc import Callable
import logging

from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.storage import Store

@@ -19,6 +18,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components

from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
from .helpers import async_is_preview_feature_enabled, async_listen
from .models import (
EventLabsUpdatedData,
LabPreviewFeature,

@@ -135,55 +135,3 @@ async def _async_scan_all_preview_features(

_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
return preview_features


@callback
def async_is_preview_feature_enabled(
hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.

Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name

Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False

labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data.preview_feature_status


@callback
def async_listen(
hass: HomeAssistant,
domain: str,
preview_feature: str,
listener: Callable[[], None],
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.

Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
listener: Callback to invoke when the preview feature is toggled

Returns:
Callable to unsubscribe from the listener
"""

@callback
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == domain
and event.data["preview_feature"] == preview_feature
):
listener()

return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
homeassistant/components/labs/helpers.py (new file, 63 lines)

@@ -0,0 +1,63 @@
"""Helper functions for the Home Assistant Labs integration."""

from __future__ import annotations

from collections.abc import Callable

from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback

from .const import LABS_DATA
from .models import EventLabsUpdatedData


@callback
def async_is_preview_feature_enabled(
    hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.

Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name

Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False

labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data.preview_feature_status


@callback
def async_listen(
hass: HomeAssistant,
domain: str,
preview_feature: str,
listener: Callable[[], None],
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.

Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
listener: Callback to invoke when the preview feature is toggled

Returns:
Callable to unsubscribe from the listener
"""

@callback
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == domain
and event.data["preview_feature"] == preview_feature
):
listener()

return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
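The two helpers in this new module are the surface the websocket handler below relies on. A minimal usage sketch follows; it assumes an internal caller inside Home Assistant, and the domain "my_integration" and feature name "fancy_dashboard" are made-up placeholders:

```python
# Hypothetical consumer of the new labs helpers; domain/feature names are placeholders.
from homeassistant.components.labs.helpers import (
    async_is_preview_feature_enabled,
    async_listen,
)
from homeassistant.core import HomeAssistant, callback


@callback
def setup_preview_behavior(hass: HomeAssistant) -> None:
    """Enable extra behavior only while the preview feature is switched on."""
    if async_is_preview_feature_enabled(hass, "my_integration", "fancy_dashboard"):
        ...  # preview code path

    @callback
    def _feature_toggled() -> None:
        # Invoked for EVENT_LABS_UPDATED events matching this domain/feature.
        ...

    # async_listen returns an unsubscribe callable; keep it to clean up on unload.
    unsub = async_listen(hass, "my_integration", "fancy_dashboard", _feature_toggled)
```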
@@ -12,6 +12,7 @@ from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import HomeAssistant, callback

from .const import LABS_DATA
from .helpers import async_is_preview_feature_enabled, async_listen
from .models import EventLabsUpdatedData

@@ -20,6 +21,7 @@ def async_setup(hass: HomeAssistant) -> None:
"""Set up the number websocket API."""
websocket_api.async_register_command(hass, websocket_list_preview_features)
websocket_api.async_register_command(hass, websocket_update_preview_feature)
websocket_api.async_register_command(hass, websocket_subscribe_feature)


@callback

@@ -108,3 +110,52 @@ async def websocket_update_preview_feature(
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)

connection.send_result(msg["id"])


@callback
@websocket_api.websocket_command(
{
vol.Required("type"): "labs/subscribe",
vol.Required("domain"): str,
vol.Required("preview_feature"): str,
}
)
def websocket_subscribe_feature(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to a specific lab preview feature updates."""
domain = msg["domain"]
preview_feature_key = msg["preview_feature"]
labs_data = hass.data[LABS_DATA]

preview_feature_id = f"{domain}.{preview_feature_key}"

if preview_feature_id not in labs_data.preview_features:
connection.send_error(
msg["id"],
websocket_api.ERR_NOT_FOUND,
f"Preview feature {preview_feature_id} not found",
)
return

preview_feature = labs_data.preview_features[preview_feature_id]

@callback
def send_event() -> None:
"""Send feature state to client."""
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
connection.send_message(
websocket_api.event_message(
msg["id"],
preview_feature.to_dict(enabled=enabled),
)
)

connection.subscriptions[msg["id"]] = async_listen(
hass, domain, preview_feature_key, send_event
)

connection.send_result(msg["id"])
send_event()
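For context, the command schema above implies a client subscribes with a message of the following shape (the domain and preview_feature values here are hypothetical). The handler replies with a result, then pushes an event message with the feature state immediately and on every subsequent toggle:

```python
# Hypothetical client payload for the "labs/subscribe" command registered above.
subscribe_message = {
    "id": 42,  # websocket message id chosen by the client
    "type": "labs/subscribe",
    "domain": "my_integration",            # placeholder domain
    "preview_feature": "fancy_dashboard",  # placeholder feature name
}
```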
@@ -108,6 +108,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_MESSAGE,
_async_service_message,
schema=SERVICE_MESSAGE_SCHEMA,
description_placeholders={"icons_url": "https://developer.lametric.com/icons"},
)

@@ -211,7 +211,7 @@
"name": "[%key:common::config_flow::data::device%]"
},
"icon": {
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: https://developer.lametric.com/icons.",
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: {icons_url}.",
"name": "Icon ID"
},
"icon_type": {

@@ -129,4 +129,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)

@@ -156,7 +156,7 @@
},
"filename": {
"description": "Path to the file to upload.",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Filename"
}
},
@@ -13,5 +13,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["oralb_ble"],
"requirements": ["oralb-ble==0.17.6"]
"requirements": ["oralb-ble==1.0.2"]
}
@@ -25,6 +25,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe

_LOGGER = logging.getLogger(__name__)

@@ -101,7 +102,18 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
except OSError:
_LOGGER.error("Pilight send failed for %s", str(message_data))

hass.services.register(DOMAIN, SERVICE_NAME, send_code, schema=RF_CODE_SCHEMA)
def _register_service() -> None:
hass.services.async_register(
DOMAIN,
SERVICE_NAME,
send_code,
schema=RF_CODE_SCHEMA,
description_placeholders={
"pilight_protocols_docs_url": "https://manual.pilight.org/protocols/index.html"
},
)

run_callback_threadsafe(hass.loop, _register_service).result()

# Publish received codes on the HA event bus
# A whitelist of codes to be published in the event bus

@@ -4,7 +4,7 @@
"description": "Sends RF code to Pilight device.",
"fields": {
"protocol": {
"description": "Protocol that Pilight recognizes. See https://manual.pilight.org/protocols/index.html for supported protocols and additional parameters that each protocol supports.",
"description": "Protocol that Pilight recognizes. See {pilight_protocols_docs_url} for supported protocols and additional parameters that each protocol supports.",
"name": "Protocol"
}
},
@@ -54,8 +54,11 @@ from .const import (
)
from .coordinator import RainMachineDataUpdateCoordinator

DEFAULT_SSL = True
API_URL_REFERENCE = (
"https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post"
)

DEFAULT_SSL = True

PLATFORMS = [
Platform.BINARY_SENSOR,

@@ -455,7 +458,15 @@ async def async_setup_entry( # noqa: C901
):
if hass.services.has_service(DOMAIN, service_name):
continue
hass.services.async_register(DOMAIN, service_name, method, schema=schema)
hass.services.async_register(
DOMAIN,
service_name,
method,
schema=schema,
description_placeholders={
"api_url": API_URL_REFERENCE,
},
)

return True

@@ -128,7 +128,7 @@
"name": "Push flow meter data"
},
"push_weather_data": {
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integration.\nSee details of RainMachine API here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integration.\nSee details of RainMachine API here: {api_url}",
"fields": {
"condition": {
"description": "Current weather condition code (WNUM).",
@@ -422,6 +422,8 @@ class ReolinkHost:
"name": self._api.nvr_name,
"base_url": self._base_url,
"network_link": "https://my.home-assistant.io/redirect/network/",
"example_ip": "192.168.1.10",
"example_url": "http://192.168.1.10:8123",
},
)

@@ -436,6 +438,8 @@ class ReolinkHost:
translation_placeholders={
"base_url": self._base_url,
"network_link": "https://my.home-assistant.io/redirect/network/",
"example_ip": "192.168.1.10",
"example_url": "http://192.168.1.10:8123",
},
)
else:

@@ -20,5 +20,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.17.0"]
"requirements": ["reolink-aio==0.17.1"]
}

@@ -1004,7 +1004,7 @@
"title": "Reolink firmware update required"
},
"https_webhook": {
"description": "Reolink products can not push motion events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `http://192.168.1.10:8123` where `192.168.1.10` is the IP of the Home Assistant device",
"description": "Reolink products can not push motion events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `{example_url}` where `{example_ip}` is the IP of the Home Assistant device",
"title": "Reolink webhook URL uses HTTPS (SSL)"
},
"password_too_long": {

@@ -1016,7 +1016,7 @@
"title": "Reolink incompatible with global SSL certificate"
},
"webhook_url": {
"description": "Did not receive initial ONVIF state from {name}. Most likely, the Reolink camera can not reach the current (local) Home Assistant URL `{base_url}`, please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}) that points to Home Assistant. For example `http://192.168.1.10:8123` where `192.168.1.10` is the IP of the Home Assistant device. Also, make sure the Reolink camera can reach that URL. Using fast motion/AI state polling until the first ONVIF push is received.",
"description": "Did not receive initial ONVIF state from {name}. Most likely, the Reolink camera can not reach the current (local) Home Assistant URL `{base_url}`, please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}) that points to Home Assistant. For example `{example_url}` where `{example_ip}` is the IP of the Home Assistant device. Also, make sure the Reolink camera can reach that URL. Using fast motion/AI state polling until the first ONVIF push is received.",
"title": "Reolink webhook URL unreachable"
}
},
@@ -1,20 +1,23 @@
"""The Rituals Perfume Genie integration."""

import asyncio
import logging

import aiohttp
from pyrituals import Account, Diffuser
from aiohttp import ClientError, ClientResponseError
from pyrituals import Account, AuthenticationException, Diffuser

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import ACCOUNT_HASH, DOMAIN, UPDATE_INTERVAL
from .coordinator import RitualsDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.NUMBER,

@@ -26,12 +29,38 @@ PLATFORMS = [

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Rituals Perfume Genie from a config entry."""
# Initiate reauth for old config entries which don't have username / password in the entry data
if CONF_EMAIL not in entry.data or CONF_PASSWORD not in entry.data:
raise ConfigEntryAuthFailed("Missing credentials")

session = async_get_clientsession(hass)
account = Account(session=session, account_hash=entry.data[ACCOUNT_HASH])

account = Account(
email=entry.data[CONF_EMAIL],
password=entry.data[CONF_PASSWORD],
session=session,
)

try:
# Authenticate first so API token/cookies are available for subsequent calls
await account.authenticate()
account_devices = await account.get_devices()
except aiohttp.ClientError as err:

except AuthenticationException as err:
# Credentials invalid/expired -> raise AuthFailed to trigger reauth flow
raise ConfigEntryAuthFailed(err) from err

except ClientResponseError as err:
_LOGGER.debug(
"HTTP error during Rituals setup: status=%s, url=%s, headers=%s",
err.status,
err.request_info,
dict(err.headers or {}),
)
raise ConfigEntryNotReady from err

except ClientError as err:
raise ConfigEntryNotReady from err

# Migrate old unique_ids to the new format

@@ -45,7 +74,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Create a coordinator for each diffuser
coordinators = {
diffuser.hublot: RitualsDataUpdateCoordinator(
hass, entry, diffuser, update_interval
hass, entry, account, diffuser, update_interval
)
for diffuser in account_devices
}

@@ -106,3 +135,14 @@ def async_migrate_entities_unique_ids(
registry_entry.entity_id,
new_unique_id=f"{diffuser.hublot}-{new_unique_id}",
)


# Migration helpers for API v2
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate config entry to version 2: drop legacy ACCOUNT_HASH and bump version."""
if entry.version < 2:
data = dict(entry.data)
data.pop(ACCOUNT_HASH, None)
hass.config_entries.async_update_entry(entry, data=data, version=2)
return True
return True
@@ -2,10 +2,10 @@

from __future__ import annotations

import logging
from typing import Any
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any

from aiohttp import ClientResponseError
from aiohttp import ClientError
from pyrituals import Account, AuthenticationException
import voluptuous as vol

@@ -13,9 +13,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import ACCOUNT_HASH, DOMAIN

_LOGGER = logging.getLogger(__name__)
from .const import DOMAIN

DATA_SCHEMA = vol.Schema(
{

@@ -28,39 +26,88 @@ DATA_SCHEMA = vol.Schema(
class RitualsPerfumeGenieConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Rituals Perfume Genie."""

VERSION = 1
VERSION = 2

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)

errors = {}

session = async_get_clientsession(self.hass)
account = Account(user_input[CONF_EMAIL], user_input[CONF_PASSWORD], session)

try:
await account.authenticate()
except ClientResponseError:
_LOGGER.exception("Unexpected response")
errors["base"] = "cannot_connect"
except AuthenticationException:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(account.email)
self._abort_if_unique_id_configured()

return self.async_create_entry(
title=account.email,
data={ACCOUNT_HASH: account.account_hash},
errors: dict[str, str] = {}
if user_input is not None:
session = async_get_clientsession(self.hass)
account = Account(
email=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
session=session,
)

try:
await account.authenticate()
except AuthenticationException:
errors["base"] = "invalid_auth"
except ClientError:
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(user_input[CONF_EMAIL])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_EMAIL],
data=user_input,
)

return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)

async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle re-authentication with Rituals."""
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Form to log in again."""
errors: dict[str, str] = {}

reauth_entry = self._get_reauth_entry()

if TYPE_CHECKING:
assert reauth_entry.unique_id is not None

if user_input:
session = async_get_clientsession(self.hass)
account = Account(
email=reauth_entry.unique_id,
password=user_input[CONF_PASSWORD],
session=session,
)

try:
await account.authenticate()
except AuthenticationException:
errors["base"] = "invalid_auth"
except ClientError:
errors["base"] = "cannot_connect"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data={
CONF_EMAIL: reauth_entry.unique_id,
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)

return self.async_show_form(
step_id="reauth_confirm",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
),
reauth_entry.data,
),
errors=errors,
)
@@ -4,6 +4,7 @@ from datetime import timedelta

DOMAIN = "rituals_perfume_genie"

# Old (API V1)
ACCOUNT_HASH = "account_hash"

# The API provided by Rituals is currently rate limited to 30 requests
@@ -3,11 +3,13 @@
from datetime import timedelta
import logging

from pyrituals import Diffuser
from aiohttp import ClientError, ClientResponseError
from pyrituals import Account, AuthenticationException, Diffuser

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

@@ -23,10 +25,12 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
account: Account,
diffuser: Diffuser,
update_interval: timedelta,
) -> None:
"""Initialize global Rituals Perfume Genie data updater."""
self.account = account
self.diffuser = diffuser
super().__init__(
hass,

@@ -37,5 +41,36 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
)

async def _async_update_data(self) -> None:
"""Fetch data from Rituals."""
await self.diffuser.update_data()
"""Fetch data from Rituals, with one silent re-auth on 401.

If silent re-auth also fails, raise ConfigEntryAuthFailed to trigger reauth flow.
Other HTTP/network errors are wrapped in UpdateFailed so HA can retry.
"""
try:
await self.diffuser.update_data()
except (AuthenticationException, ClientResponseError) as err:
# Treat 401/403 like AuthenticationException → one silent re-auth, single retry
if isinstance(err, ClientResponseError) and (status := err.status) not in (
401,
403,
):
# Non-auth HTTP error → let HA retry
raise UpdateFailed(f"HTTP {status}") from err

self.logger.debug(
"Auth issue detected (%r). Attempting silent re-auth.", err
)
try:
await self.account.authenticate()
await self.diffuser.update_data()
except AuthenticationException as err2:
# Credentials invalid → trigger HA reauth
raise ConfigEntryAuthFailed from err2
except ClientResponseError as err2:
# Still HTTP auth errors after refresh → trigger HA reauth
if err2.status in (401, 403):
raise ConfigEntryAuthFailed from err2
raise UpdateFailed(f"HTTP {err2.status}") from err2
except ClientError as err:
# Network issues (timeouts, DNS, etc.)
raise UpdateFailed(f"Network error: {err!r}") from err
@@ -1,10 +1,10 @@
{
"domain": "rituals_perfume_genie",
"name": "Rituals Perfume Genie",
"codeowners": ["@milanmeu", "@frenck"],
"codeowners": ["@milanmeu", "@frenck", "@quebulm"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
"iot_class": "cloud_polling",
"loggers": ["pyrituals"],
"requirements": ["pyrituals==0.0.6"]
"requirements": ["pyrituals==0.0.7"]
}
@@ -1,7 +1,8 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "Re-authentication was successful"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",

@@ -9,6 +10,12 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"description": "Please enter the correct password."
},
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
@@ -17,6 +17,7 @@ from roborock import (
from roborock.data import UserData
from roborock.devices.device import RoborockDevice
from roborock.devices.device_manager import UserParams, create_device_manager
from roborock.map.map_parser import MapParserConfig

from homeassistant.const import CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant

@@ -24,7 +25,16 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_BASE_URL, CONF_USER_DATA, DOMAIN, PLATFORMS
from .const import (
CONF_BASE_URL,
CONF_SHOW_BACKGROUND,
CONF_USER_DATA,
DEFAULT_DRAWABLES,
DOMAIN,
DRAWABLES,
MAP_SCALE,
PLATFORMS,
)
from .coordinator import (
RoborockConfigEntry,
RoborockCoordinators,

@@ -56,6 +66,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
user_params,
cache=cache,
session=async_get_clientsession(hass),
map_parser_config=MapParserConfig(
drawables=[
drawable
for drawable, default_value in DEFAULT_DRAWABLES.items()
if entry.options.get(DRAWABLES, {}).get(drawable, default_value)
],
show_background=entry.options.get(CONF_SHOW_BACKGROUND, False),
map_scale=MAP_SCALE,
),
)
except RoborockInvalidCredentials as err:
raise ConfigEntryAuthFailed(
@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.9.3",
"python-roborock==3.10.2",
"vacuum-map-parser-roborock==0.1.4"
]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/sharkiq",
"iot_class": "cloud_polling",
"loggers": ["sharkiq"],
"requirements": ["sharkiq==1.4.2"]
"requirements": ["sharkiq==1.5.0"]
}
@@ -44,6 +44,7 @@ from .entity import (
)
from .utils import (
async_remove_orphaned_entities,
async_remove_shelly_entity,
format_ble_addr,
get_blu_trv_device_info,
get_device_entry_gen,

@@ -80,6 +81,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
device_class=ButtonDeviceClass.RESTART,
entity_category=EntityCategory.CONFIG,
press_action="trigger_reboot",
supported=lambda coordinator: coordinator.sleep_period == 0,
),
ShellyButtonDescription[ShellyBlockCoordinator](
key="self_test",

@@ -197,7 +199,8 @@ async def async_setup_entry(
"""Set up button entities."""
entry_data = config_entry.runtime_data
coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator | None
if get_device_entry_gen(config_entry) in RPC_GENERATIONS:
device_gen = get_device_entry_gen(config_entry)
if device_gen in RPC_GENERATIONS:
coordinator = entry_data.rpc
else:
coordinator = entry_data.block

@@ -210,6 +213,12 @@ async def async_setup_entry(
hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
)

# Remove the 'restart' button for sleeping devices as it was mistakenly
# added in https://github.com/home-assistant/core/pull/154673
entry_sleep_period = config_entry.data[CONF_SLEEP_PERIOD]
if device_gen in RPC_GENERATIONS and entry_sleep_period:
async_remove_shelly_entity(hass, BUTTON_PLATFORM, f"{coordinator.mac}-reboot")

entities: list[ShellyButton] = []

entities.extend(

@@ -224,7 +233,7 @@ async def async_setup_entry(
return

# add RPC buttons
if config_entry.data[CONF_SLEEP_PERIOD]:
if entry_sleep_period:
async_setup_entry_rpc(
hass,
config_entry,
@@ -72,7 +72,6 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]):
def _get_starlink_data(self) -> StarlinkData:
"""Retrieve Starlink data."""
context = self.channel_context
status = status_data(context)
location = location_data(context)
sleep = get_sleep_config(context)
status, obstruction, alert = status_data(context)

@@ -28,6 +28,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import now
from homeassistant.util.variance import ignore_variance

from .coordinator import StarlinkConfigEntry, StarlinkData
from .entity import StarlinkEntity

@@ -91,6 +92,10 @@ class StarlinkAccumulationSensor(StarlinkSensorEntity, RestoreSensor):
self._attr_native_value = last_native_value


uptime_to_stable_datetime = ignore_variance(
lambda value: now() - timedelta(seconds=value), timedelta(minutes=1)
)

SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
StarlinkSensorEntityDescription(
key="ping",

@@ -150,9 +155,7 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
translation_key="last_restart",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: (
now() - timedelta(seconds=data.status["uptime"], milliseconds=-500)
).replace(microsecond=0),
value_fn=lambda data: uptime_to_stable_datetime(data.status["uptime"]),
entity_class=StarlinkSensorEntity,
),
StarlinkSensorEntityDescription(
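The new `uptime_to_stable_datetime` helper wraps the uptime-to-timestamp conversion with `ignore_variance`, so the reported restart time stops jittering on every poll and only moves when the recomputed value drifts by more than a minute. A rough self-contained sketch of that idea (not Home Assistant's actual implementation):

```python
from collections.abc import Callable
from datetime import datetime, timedelta


def ignore_variance_sketch(
    func: Callable[[float], datetime], tolerance: timedelta
) -> Callable[[float], datetime]:
    """Keep returning the previous result while the new one stays within tolerance."""
    last: datetime | None = None

    def wrapper(value: float) -> datetime:
        nonlocal last
        new = func(value)
        # Only accept the new value when it differs by more than the tolerance.
        if last is None or abs(new - last) > tolerance:
            last = new
        return last

    return wrapper
```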
@@ -524,6 +524,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_send_telegram_message,
schema=schema,
supports_response=supports_response,
description_placeholders={
"formatting_options_url": "https://core.telegram.org/bots/api#formatting-options"
},
)

return True
@@ -64,6 +64,12 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

DESCRIPTION_PLACEHOLDERS: dict[str, str] = {
"botfather_username": "@BotFather",
"botfather_url": "https://t.me/botfather",
"socks_url": "socks5://username:password@proxy_ip:proxy_port",
}

STEP_USER_DATA_SCHEMA: vol.Schema = vol.Schema(
{
vol.Required(CONF_PLATFORM): SelectSelector(

@@ -310,10 +316,7 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle a flow to create a new config entry for a Telegram bot."""

description_placeholders: dict[str, str] = {
"botfather_username": "@BotFather",
"botfather_url": "https://t.me/botfather",
}
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()
if not user_input:
return self.async_show_form(
step_id="user",

@@ -552,13 +555,14 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
},
},
),
description_placeholders=DESCRIPTION_PLACEHOLDERS,
)
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
CONF_PROXY_URL
)

errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()

user_input[CONF_API_KEY] = api_key
bot_name = await self._validate_bot(
@@ -60,7 +60,7 @@
"proxy_url": "Proxy URL"
},
"data_description": {
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n(socks5://username:password@proxy_ip:proxy_port)"
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n({socks_url})"
},
"name": "Advanced settings"
}

@@ -400,7 +400,7 @@
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
},
"caption": {
"description": "The title of the media.",
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
},
"chat_id": {

@@ -499,7 +499,7 @@
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
},
"caption": {
"description": "The title of the animation.",
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
},
"config_entry_id": {

@@ -600,7 +600,7 @@
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
},
"caption": {
"description": "The title of the document.",
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
},
"config_entry_id": {

@@ -745,7 +745,7 @@
"name": "Keyboard"
},
"message": {
"description": "Message body of the notification.",
"description": "Message body of the notification.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
"name": "Message"
},
"message_tag": {

@@ -757,7 +757,7 @@
"name": "Message thread ID"
},
"parse_mode": {
"description": "Parser for the message text.",
"description": "Parser for the message text.\nSee [formatting options]({formatting_options_url}) for more details.",
"name": "Parse mode"
},
"reply_to_message_id": {

@@ -787,7 +787,7 @@
"name": "Authentication method"
},
"caption": {
"description": "The title of the image.",
"description": "The title of the media.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
"name": "Caption"
},
"config_entry_id": {

@@ -991,7 +991,7 @@
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
},
"caption": {
"description": "The title of the video.",
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
},
"config_entry_id": {

@@ -1070,7 +1070,7 @@
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
},
"caption": {
"description": "The title of the voice message.",
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
},
"config_entry_id": {
@@ -1,7 +1,7 @@
"""Helpers for template integration."""

from collections.abc import Callable
from enum import Enum
from enum import StrEnum
import hashlib
import itertools
import logging

@@ -33,6 +33,7 @@ from homeassistant.helpers.entity_platform import (
async_get_platforms,
)
from homeassistant.helpers.issue_registry import IssueSeverity
from homeassistant.helpers.script_variables import ScriptVariables
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import yaml as yaml_util

@@ -190,12 +191,12 @@ def async_create_template_tracking_entities(
async_add_entities(entities)


def _format_template(value: Any) -> Any:
def _format_template(value: Any, field: str | None = None) -> Any:
if isinstance(value, template.Template):
return value.template

if isinstance(value, Enum):
return value.name
if isinstance(value, StrEnum):
return value.value

if isinstance(value, (int, float, str, bool)):
return value

@@ -207,14 +208,13 @@ def format_migration_config(
config: ConfigType | list[ConfigType], depth: int = 0
) -> ConfigType | list[ConfigType]:
"""Recursive method to format templates as strings from ConfigType."""
types = (dict, list)
if depth > 9:
raise RecursionError

if isinstance(config, list):
items = []
for item in config:
if isinstance(item, types):
if isinstance(item, (dict, list)):
if len(item) > 0:
items.append(format_migration_config(item, depth + 1))
else:

@@ -223,9 +223,18 @@

formatted_config = {}
for field, value in config.items():
if isinstance(value, types):
if isinstance(value, dict):
if len(value) > 0:
formatted_config[field] = format_migration_config(value, depth + 1)
elif isinstance(value, list):
if len(value) > 0:
formatted_config[field] = format_migration_config(value, depth + 1)
else:
formatted_config[field] = []
elif isinstance(value, ScriptVariables):
formatted_config[field] = format_migration_config(
value.as_dict(), depth + 1
)
else:
formatted_config[field] = _format_template(value)

@@ -260,9 +269,9 @@ def create_legacy_template_issue(
try:
config.pop(CONF_PLATFORM, None)
modified_yaml = format_migration_config(config)
yaml_config = yaml_util.dump({DOMAIN: [{domain: [modified_yaml]}]})
# Format to show up properly in a numbered bullet on the repair.
yaml_config = " ```\n " + yaml_config.replace("\n", "\n ") + "```"
yaml_config = (
f"```\n{yaml_util.dump({DOMAIN: [{domain: [modified_yaml]}]})}\n```"
)
except RecursionError:
yaml_config = f"{DOMAIN}:\n - {domain}: - ..."

@@ -278,6 +287,7 @@ def create_legacy_template_issue(
"domain": domain,
"breadcrumb": breadcrumb,
"config": yaml_config,
"filename": "<filename>",
},
)
@@ -635,14 +635,14 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
# Support legacy mireds in template light.
temperature = int(render)
if (min_kelvin := self._attr_min_color_temp_kelvin) is not None:
min_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
else:
min_mireds = DEFAULT_MIN_MIREDS

if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
max_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
max_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
else:
max_mireds = DEFAULT_MAX_MIREDS

if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
min_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
else:
min_mireds = DEFAULT_MIN_MIREDS
if min_mireds <= temperature <= max_mireds:
self._attr_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(temperature)

@@ -856,42 +856,36 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):

try:
if render in (None, "None", ""):
self._attr_max_mireds = DEFAULT_MAX_MIREDS
self._attr_max_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = None
return

self._attr_max_mireds = max_mireds = int(render)
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(max_mireds)
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(int(render))
)
except ValueError:
_LOGGER.exception(
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_max_mireds = DEFAULT_MAX_MIREDS
self._attr_max_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = None

@callback
def _update_min_mireds(self, render):
"""Update the min mireds from the template."""
try:
if render in (None, "None", ""):
self._attr_min_mireds = DEFAULT_MIN_MIREDS
self._attr_min_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = None
return

self._attr_min_mireds = min_mireds = int(render)
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(min_mireds)
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(int(render))
)
except ValueError:
_LOGGER.exception(
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_min_mireds = DEFAULT_MIN_MIREDS
self._attr_min_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = None

@callback
def _update_supports_transition(self, render):
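The min/max swaps above follow from mireds and kelvin being reciprocal scales (mired = 1,000,000 / K): a template that supplies the light's maximum mired value is really setting its minimum color temperature in kelvin, and vice versa. A quick worked example:

```python
# Mired and kelvin are inverses, so min/max bounds swap between the two scales.
def kelvin_to_mired(kelvin: float) -> float:
    return 1_000_000 / kelvin


print(kelvin_to_mired(6500))  # ~153.8 mireds -> the light's *minimum* mired bound
print(kelvin_to_mired(2000))  # 500.0 mireds  -> the light's *maximum* mired bound
```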
@@ -529,7 +529,7 @@
"title": "Deprecated battery level option in {entity_name}"
},
"deprecated_legacy_templates": {
"description": "The legacy `platform: template` syntax for `{domain}` is being removed. Please migrate `{breadcrumb}` to the modern template syntax.\n\n1. Remove existing template definition.\n2. Add new template definition:\n{config}\n3. Restart Home Assistant or reload template entities.",
"description": "The legacy `platform: template` syntax for `{domain}` is being removed. Please migrate `{breadcrumb}` to the modern template syntax.\n#### Step 1 - Remove legacy configuration\nRemove the `{breadcrumb}` template definition from the `configuration.yaml` `{domain}:` section.\n\n**Note:** If you are using `{domain}: !include {filename}.yaml` in `configuration.yaml`, remove the {domain} definition from the included `{filename}.yaml`.\n#### Step 2 - Add the modern configuration\nAdd new template definition inside `configuration.yaml`:\n{config}\n**Note:** If there are any existing `template:` sections in your configuration, make sure to omit the `template:` line from the yaml above. There can only be 1 `template:` section in `configuration.yaml`. Also, ensure the indentation is aligned with the existing entities within the `template:` section.\n#### Step 3 - Restart Home Assistant or reload template entities",
"title": "Legacy {domain} template deprecation"
}
},
@@ -325,6 +325,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
vol.Required(ATTR_TOU_SETTINGS): dict,
}
),
description_placeholders={
"time_of_use_url": "https://developer.tesla.com/docs/fleet-api#time_of_use_settings"
},
)

async def add_charge_schedule(call: ServiceCall) -> None:

@@ -1358,7 +1358,7 @@
"name": "Energy Site"
},
"tou_settings": {
"description": "See https://developer.tesla.com/docs/fleet-api#time_of_use_settings for details.",
"description": "See {time_use_url} for details.",
"name": "Settings"
}
},
@@ -125,6 +125,18 @@ def _get_temperature_wrappers(
device, DPCode.TEMP_SET_F, prefer_function=True
)

# If there is a temp unit convert dpcode, override empty units
if (
temp_unit_convert := DPCodeEnumWrapper.find_dpcode(
device, DPCode.TEMP_UNIT_CONVERT
)
) is not None:
for wrapper in (temp_current, temp_current_f, temp_set, temp_set_f):
if wrapper is not None and not wrapper.type_information.unit:
wrapper.type_information.unit = temp_unit_convert.read_device_status(
device
)

# Get wrappers for celsius and fahrenheit
# We need to check the unit of measurement
current_celsius = _get_temperature_wrapper(
@@ -40,7 +40,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"requirements": ["uiprotect==7.31.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==7.33.2", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",
@@ -66,7 +66,7 @@ async def async_setup_entry(
@callback
def discover(devices):
"""Add new devices to platform."""
_setup_entities(devices, async_add_entities)
_setup_entities(devices, async_add_entities, coordinator)

config_entry.async_on_unload(
async_dispatcher_connect(hass, VS_DISCOVERY.format(VS_DEVICES), discover)
@@ -19,6 +19,8 @@ from pythonxbox.api.provider.smartglass.models import (

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType

from .entity import to_https


class MediaTypeDetails(NamedTuple):
"""Details for media type."""
@@ -151,5 +153,5 @@ def _find_media_image(images: list[Image]) -> str | None:
if match := next(
(image for image in images if image.image_purpose == purpose), None
):
return f"https:{match.uri}" if match.uri.startswith("/") else match.uri
return to_https(match.uri)
return None

@@ -151,6 +151,15 @@ def check_deprecated_entity(
return False


def to_https(image_url: str) -> str:
"""Convert image URLs to secure URLs."""

url = URL(image_url)
if url.host == "images-eds.xboxlive.com":
url = url.with_host("images-eds-ssl.xboxlive.com")
return str(url.with_scheme("https"))


def profile_pic(person: Person, _: Title | None = None) -> str | None:
"""Return the gamer pic."""

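For reference, the new `to_https` helper can be exercised on its own with `yarl`; the function body below is copied from the hunk above, while the sample URLs are invented.

```python
from yarl import URL


def to_https(image_url: str) -> str:
    """Convert image URLs to secure URLs (same body as the hunk above)."""
    url = URL(image_url)
    if url.host == "images-eds.xboxlive.com":
        url = url.with_host("images-eds-ssl.xboxlive.com")
    return str(url.with_scheme("https"))


# Invented example URLs:
assert (
    to_https("http://images-eds.xboxlive.com/image?w=100")
    == "https://images-eds-ssl.xboxlive.com/image?w=100"
)
assert to_https("http://example.com/pic.png") == "https://example.com/pic.png"
```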
@@ -160,9 +169,4 @@ def profile_pic(person: Person, _: Title | None = None) -> str | None:
# to point to the correct image, with the correct domain and certificate.
# We need to also remove the 'mode=Padding' query because with it,
# it results in an error 400.
url = URL(person.display_pic_raw)
if url.host == "images-eds.xboxlive.com":
url = url.with_host("images-eds-ssl.xboxlive.com").with_scheme("https")
query = dict(url.query)
query.pop("mode", None)
return str(url.with_query(query))
return str(URL(to_https(person.display_pic_raw)).without_query_params("mode"))

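The rewritten `profile_pic` relies on `yarl.URL.without_query_params` (available in recent yarl releases) to drop the `mode` parameter in a single call. A standalone illustration with a made-up gamer-pic URL:

```python
from yarl import URL

# Invented gamer-pic URL carrying the problematic mode=Padding query.
raw = "https://images-eds-ssl.xboxlive.com/image?url=abc&mode=Padding&format=png"
cleaned = URL(raw).without_query_params("mode")
assert "mode" not in cleaned.query      # the offending parameter is gone
assert cleaned.query["format"] == "png"  # the other parameters are kept
```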
@@ -22,6 +22,7 @@ from homeassistant.util import dt as dt_util
from .binary_sensor import profile_pic
from .const import DOMAIN
from .coordinator import XboxConfigEntry
from .entity import to_https

_LOGGER = logging.getLogger(__name__)

@@ -655,6 +656,6 @@ def game_thumbnail(images: list[Image]) -> str | None:
(i for i in images if i.type == img_type),
None,
):
return match.url
return to_https(match.url)

return None

@@ -34,6 +34,7 @@ from .entity import (
XboxBaseEntity,
XboxBaseEntityDescription,
check_deprecated_entity,
to_https,
)

PARALLEL_UPDATES = 0
@@ -142,8 +143,8 @@ def title_logo(_: Person, title: Title | None) -> str | None:
"""Get the game logo."""

return (
next((i.url for i in title.images if i.type == "Tile"), None)
or next((i.url for i in title.images if i.type == "Logo"), None)
next((to_https(i.url) for i in title.images if i.type == "Tile"), None)
or next((to_https(i.url) for i in title.images if i.type == "Logo"), None)
if title and title.images
else None
)

@@ -44,6 +44,11 @@ GATEWAY_SETTINGS = vol.Schema(
}
)

ERROR_STEP_PLACEHOLDERS = {
"tutorial_url": "https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz",
"invalid_host_url": "https://www.home-assistant.io/integrations/xiaomi_aqara/#connection-problem",
}


class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a Xiaomi Aqara config flow."""
@@ -66,7 +71,12 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN):
if (self.host is None and self.sid is None) or errors:
schema = GATEWAY_CONFIG_HOST

return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
return self.async_show_form(
step_id="user",
data_schema=schema,
errors=errors,
description_placeholders=ERROR_STEP_PLACEHOLDERS,
)

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -149,7 +159,10 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN):
)

return self.async_show_form(
step_id="select", data_schema=select_schema, errors=errors
step_id="select",
data_schema=select_schema,
errors=errors,
description_placeholders=ERROR_STEP_PLACEHOLDERS,
)

async def async_step_zeroconf(
@@ -236,5 +249,8 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN):
errors[CONF_KEY] = "invalid_key"

return self.async_show_form(
step_id="settings", data_schema=GATEWAY_SETTINGS, errors=errors
step_id="settings",
data_schema=GATEWAY_SETTINGS,
errors=errors,
description_placeholders=ERROR_STEP_PLACEHOLDERS,
)

@@ -7,7 +7,7 @@
},
"error": {
"discovery_error": "Failed to discover a Xiaomi Aqara Gateway, try using the IP of the device running Home Assistant as interface",
"invalid_host": "Invalid hostname or IP address, see https://www.home-assistant.io/integrations/xiaomi_aqara/#connection-problem",
"invalid_host": "Invalid hostname or IP address, see {invalid_host_url}",
"invalid_interface": "Invalid network interface",
"invalid_key": "Invalid Gateway key",
"invalid_mac": "Invalid MAC address"
@@ -25,7 +25,7 @@
"key": "The key of your Gateway",
"name": "Name of the Gateway"
},
"description": "The key (password) can be retrieved using this tutorial: https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz. If the key is not provided only sensors will be accessible",
"description": "The key (password) can be retrieved using this tutorial: {tutorial_url}. If the key is not provided only sensors will be accessible",
"title": "Optional settings"
},
"user": {

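Taken together, the two Xiaomi Aqara hunks move hard-coded URLs out of the translated strings and into `description_placeholders`. The frontend substitutes the placeholder values when it renders the form; a rough sketch of that substitution using plain `str.format_map` (not the actual translation pipeline):

```python
ERROR_STEP_PLACEHOLDERS = {
    "tutorial_url": "https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz",
    "invalid_host_url": "https://www.home-assistant.io/integrations/xiaomi_aqara/#connection-problem",
}

# Translated string as shipped in strings.json, still containing the token.
template = "Invalid hostname or IP address, see {invalid_host_url}"

# The frontend performs the real substitution; str.format_map mimics the result.
rendered = template.format_map(ERROR_STEP_PLACEHOLDERS)
assert rendered.endswith("xiaomi_aqara/#connection-problem")
```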
@@ -380,7 +380,12 @@ def _async_setup_services(hass: HomeAssistant):
SERVICE_SET_MODE, SERVICE_SCHEMA_SET_MODE, "async_set_mode"
)
platform.async_register_entity_service(
SERVICE_START_FLOW, SERVICE_SCHEMA_START_FLOW, _async_start_flow
SERVICE_START_FLOW,
SERVICE_SCHEMA_START_FLOW,
_async_start_flow,
description_placeholders={
"flow_objects_urls": "https://yeelight.readthedocs.io/en/stable/yeelight.html#flow-objects"
},
)
platform.async_register_entity_service(
SERVICE_SET_COLOR_SCENE, SERVICE_SCHEMA_SET_COLOR_SCENE, _async_set_color_scene
@@ -397,6 +402,9 @@ def _async_setup_services(hass: HomeAssistant):
SERVICE_SET_COLOR_FLOW_SCENE,
SERVICE_SCHEMA_SET_COLOR_FLOW_SCENE,
_async_set_color_flow_scene,
description_placeholders={
"examples_url": "https://yeelight.readthedocs.io/en/stable/flow.html"
},
)
platform.async_register_entity_service(
SERVICE_SET_AUTO_DELAY_OFF_SCENE,

@@ -102,7 +102,7 @@
"name": "Count"
},
"transitions": {
"description": "Array of transitions, for desired effect. Examples https://yeelight.readthedocs.io/en/stable/flow.html.",
"description": "Array of transitions, for desired effect. Examples {examples_url}.",
"name": "Transitions"
}
},
@@ -171,7 +171,7 @@
"name": "Set music mode"
},
"start_flow": {
"description": "Starts a custom flow, using transitions from https://yeelight.readthedocs.io/en/stable/yeelight.html#flow-objects.",
"description": "Starts a custom flow, using transitions from {flow_objects_urls}.",
"fields": {
"action": {
"description": "[%key:component::yeelight::services::set_color_flow_scene::fields::action::description%]",

@@ -680,6 +680,13 @@ class BaseZhaFlow(ConfigEntryBaseFlow):

try:
await self._form_network_task
except Exception as exc:
_LOGGER.exception("Failed to form new network")
self._progress_error = AbortFlow(
reason="cannot_form_network",
description_placeholders={"error": str(exc)},
)
return self.async_show_progress_done(next_step_id="progress_failed")
finally:
self._form_network_task = None

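The ZHA change awaits the background network-formation task, records any failure for a later `progress_failed` step, and always clears the task reference. A stripped-down asyncio sketch of that capture-and-clear pattern, with no Home Assistant flow machinery and illustrative names only:

```python
import asyncio


class NetworkFormer:
    """Illustrative holder for a background task and its captured error."""

    def __init__(self) -> None:
        self._form_network_task: asyncio.Task | None = None
        self._progress_error: Exception | None = None

    async def start(self) -> None:
        # Kick off the long-running work in the background.
        self._form_network_task = asyncio.create_task(self._form_network())

    async def _form_network(self) -> None:
        raise RuntimeError("radio did not respond")  # simulate a failure

    async def finish(self) -> str:
        """Await the task, capture any failure, always clear the reference."""
        assert self._form_network_task is not None
        try:
            await self._form_network_task
        except Exception as exc:  # broad catch mirrors the hunk above
            self._progress_error = exc
            return "progress_failed"
        finally:
            self._form_network_task = None
        return "done"


async def main() -> None:
    former = NetworkFormer()
    await former.start()
    assert await former.finish() == "progress_failed"
    assert former._form_network_task is None


asyncio.run(main())
```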
@@ -1,6 +1,7 @@
{
"config": {
"abort": {
"cannot_form_network": "Could not form a new Zigbee network.\n\nError: {error}",
"cannot_resolve_path": "Could not resolve device path: {path}",
"cannot_restore_backup": "The adapter you are restoring to does not properly support backup restoration. Please upgrade the firmware.\n\nError: {error}",
"cannot_restore_backup_no_ieee_confirm": "The adapter you are restoring to has outdated firmware and cannot write the adapter IEEE address multiple times. Please upgrade the firmware or confirm permanent overwrite in the previous step.",
@@ -1913,16 +1914,17 @@
"title": "Zigbee network settings have changed"
},
"wrong_silabs_firmware_installed_nabucasa": {
"description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install the Zigbee firmware:\n - Go to Settings > System > Hardware, select the device and select Configure.\n - Select the 'Migrate Zigbee to a new adapter' option and follow the instructions.",
"title": "Zigbee adapter with multiprotocol firmware detected"
"description": "Your Zigbee adapter is currently in an incorrect state: {firmware_type}.\n\nThe device may have Thread or multiprotocol firmware installed, or it may be stuck in the bootloader. To resolve this, try to unplug the adapter temporarily.\n\nIf the issue persists and you need to install Zigbee firmware:\n - Go to Settings > System > Hardware, select the device and select Configure.\n - Select the 'Migrate Zigbee to a new adapter' option and follow the instructions.",
"title": "Zigbee adapter in incorrect state"
},
"wrong_silabs_firmware_installed_other": {
"description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install Zigbee firmware. Follow your Zigbee adapter manufacturer's instructions for how to do this.",
"description": "Your Zigbee adapter is currently in an incorrect state: {firmware_type}.\n\nThe device may have Thread or multiprotocol firmware installed, or it may be stuck in the bootloader. To resolve this, try to unplug the adapter temporarily.\n\nIf the issue persists and you need to install Zigbee firmware, follow your Zigbee adapter manufacturer's instructions for how to do this.",
"title": "[%key:component::zha::issues::wrong_silabs_firmware_installed_nabucasa::title%]"
}
},
"options": {
"abort": {
"cannot_form_network": "[%key:component::zha::config::abort::cannot_form_network%]",
"cannot_resolve_path": "[%key:component::zha::config::abort::cannot_resolve_path%]",
"cannot_restore_backup": "[%key:component::zha::config::abort::cannot_restore_backup%]",
"cannot_restore_backup_no_ieee_confirm": "[%key:component::zha::config::abort::cannot_restore_backup_no_ieee_confirm%]",

@@ -306,6 +306,9 @@ class ZWaveServices:
has_at_least_one_node,
),
),
description_placeholders={
"api_docs_url": "https://zwave-js.github.io/node-zwave-js/#/api/CCs/index"
},
)

self._hass.services.async_register(

@@ -400,11 +400,11 @@
"name": "Entity ID(s)"
},
"method_name": {
"description": "The name of the API method to call. Refer to the Z-Wave Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.",
"description": "The name of the API method to call. Refer to the Z-Wave Command Class API documentation ({api_docs_url}) for available methods.",
"name": "Method name"
},
"parameters": {
"description": "A list of parameters to pass to the API method. Refer to the Z-Wave Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for parameters.",
"description": "A list of parameters to pass to the API method. Refer to the Z-Wave Command Class API documentation ({api_docs_url}) for parameters.",
"name": "Parameters"
}
},

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 12
PATCH_VERSION: Final = "0"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

@@ -124,6 +124,12 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = {
# Added in 2025.10.0 because of
# https://github.com/frenck/spook/issues/1066
"spook": BlockedIntegration(AwesomeVersion("4.0.0"), "breaks the template engine"),
# Added in 2025.12.1 because of
# https://github.com/JaccoR/hass-entso-e/issues/263
"entsoe": BlockedIntegration(
AwesomeVersion("0.7.1"),
"crashes Home Assistant when it can't connect to the API",
),
}

DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey(

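The new loader entry blocks `entsoe` custom-integration builds older than 0.7.1. Under the hood this is essentially an `awesomeversion` comparison against the installed version; the helper below is a hedged sketch of that idea (its name, signature, and the exact blocking rule are assumptions, not the loader's actual code).

```python
from awesomeversion import AwesomeVersion

# Mirrors the mapping above: domain -> (lowest allowed version, reason).
BLOCKED = {
    "spook": (AwesomeVersion("4.0.0"), "breaks the template engine"),
    "entsoe": (
        AwesomeVersion("0.7.1"),
        "crashes Home Assistant when it can't connect to the API",
    ),
}


def blocked_reason(domain: str, installed_version: str) -> str | None:
    """Return a block reason if the installed version is below the cutoff."""
    if (entry := BLOCKED.get(domain)) is None:
        return None
    lowest_ok, reason = entry
    if AwesomeVersion(installed_version) < lowest_ok:
        return reason
    return None


assert blocked_reason("entsoe", "0.7.0") is not None  # below the cutoff: blocked
assert blocked_reason("entsoe", "0.7.1") is None      # at or above the cutoff
assert blocked_reason("some_other_custom", "1.0.0") is None  # not in the list
```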
@@ -39,7 +39,7 @@ habluetooth==5.7.0
hass-nabucasa==1.7.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251203.0
home-assistant-frontend==20251203.1
home-assistant-intents==2025.12.2
httpx==0.28.1
ifaddr==0.2.0

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2025.12.0"
version = "2025.12.1"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."

requirements_all.txt (generated)
@@ -935,7 +935,7 @@ eufylife-ble-client==0.1.8
|
||||
# evdev==1.6.1
|
||||
|
||||
# homeassistant.components.evohome
|
||||
evohome-async==1.0.5
|
||||
evohome-async==1.0.6
|
||||
|
||||
# homeassistant.components.bryant_evolution
|
||||
evolutionhttp==0.0.18
|
||||
@@ -1198,7 +1198,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20251203.0
|
||||
home-assistant-frontend==20251203.1
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.12.2
|
||||
@@ -1669,7 +1669,7 @@ openwrt-ubus-rpc==0.0.2
|
||||
opower==0.15.9
|
||||
|
||||
# homeassistant.components.oralb
|
||||
oralb-ble==0.17.6
|
||||
oralb-ble==1.0.2
|
||||
|
||||
# homeassistant.components.oru
|
||||
oru==0.1.11
|
||||
@@ -2342,7 +2342,7 @@ pyrepetierng==0.1.0
|
||||
pyrisco==0.6.7
|
||||
|
||||
# homeassistant.components.rituals_perfume_genie
|
||||
pyrituals==0.0.6
|
||||
pyrituals==0.0.7
|
||||
|
||||
# homeassistant.components.thread
|
||||
pyroute2==0.7.5
|
||||
@@ -2557,7 +2557,7 @@ python-rabbitair==0.0.8
|
||||
python-ripple-api==0.0.3
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==3.9.3
|
||||
python-roborock==3.10.2
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.45
|
||||
@@ -2717,7 +2717,7 @@ renault-api==0.5.1
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.17.0
|
||||
reolink-aio==0.17.1
|
||||
|
||||
# homeassistant.components.idteck_prox
|
||||
rfk101py==0.0.1
|
||||
@@ -2820,7 +2820,7 @@ sentry-sdk==1.45.1
|
||||
sfrbox-api==0.1.0
|
||||
|
||||
# homeassistant.components.sharkiq
|
||||
sharkiq==1.4.2
|
||||
sharkiq==1.5.0
|
||||
|
||||
# homeassistant.components.aquostv
|
||||
sharp_aquos_rc==0.3.2
|
||||
@@ -3050,7 +3050,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.31.0
|
||||
uiprotect==7.33.2
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
||||
requirements_test_all.txt (generated)
@@ -823,7 +823,7 @@ eternalegypt==0.0.16
|
||||
eufylife-ble-client==0.1.8
|
||||
|
||||
# homeassistant.components.evohome
|
||||
evohome-async==1.0.5
|
||||
evohome-async==1.0.6
|
||||
|
||||
# homeassistant.components.bryant_evolution
|
||||
evolutionhttp==0.0.18
|
||||
@@ -1056,7 +1056,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20251203.0
|
||||
home-assistant-frontend==20251203.1
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.12.2
|
||||
@@ -1437,7 +1437,7 @@ openwebifpy==4.3.1
|
||||
opower==0.15.9
|
||||
|
||||
# homeassistant.components.oralb
|
||||
oralb-ble==0.17.6
|
||||
oralb-ble==1.0.2
|
||||
|
||||
# homeassistant.components.ourgroceries
|
||||
ourgroceries==1.5.4
|
||||
@@ -1968,7 +1968,7 @@ pyrate-limiter==3.9.0
|
||||
pyrisco==0.6.7
|
||||
|
||||
# homeassistant.components.rituals_perfume_genie
|
||||
pyrituals==0.0.6
|
||||
pyrituals==0.0.7
|
||||
|
||||
# homeassistant.components.thread
|
||||
pyroute2==0.7.5
|
||||
@@ -2135,7 +2135,7 @@ python-pooldose==0.7.8
|
||||
python-rabbitair==0.0.8
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==3.9.3
|
||||
python-roborock==3.10.2
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.45
|
||||
@@ -2271,7 +2271,7 @@ renault-api==0.5.1
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.17.0
|
||||
reolink-aio==0.17.1
|
||||
|
||||
# homeassistant.components.rflink
|
||||
rflink==0.0.67
|
||||
@@ -2353,7 +2353,7 @@ sentry-sdk==1.45.1
|
||||
sfrbox-api==0.1.0
|
||||
|
||||
# homeassistant.components.sharkiq
|
||||
sharkiq==1.4.2
|
||||
sharkiq==1.5.0
|
||||
|
||||
# homeassistant.components.simplefin
|
||||
simplefin4py==0.0.18
|
||||
@@ -2535,7 +2535,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.31.0
|
||||
uiprotect==7.33.2
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
||||
@@ -40,6 +40,7 @@ async def test_full_flow(
|
||||
user_input={
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -74,6 +75,7 @@ async def test_already_configured(
|
||||
user_input={
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -107,6 +109,7 @@ async def test_auth_recover_exception(
|
||||
user_input={
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -123,6 +126,7 @@ async def test_auth_recover_exception(
|
||||
user_input={
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -164,6 +168,7 @@ async def test_account_recover_exception(
|
||||
user_input={
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -82,6 +82,10 @@ def patch_doorbird_api_entry_points(api: MagicMock) -> Generator[DoorBird]:
|
||||
"homeassistant.components.doorbird.config_flow.DoorBird",
|
||||
return_value=api,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.doorbird.device.get_url",
|
||||
return_value="http://127.0.0.1:8123",
|
||||
),
|
||||
):
|
||||
yield api
|
||||
|
||||
|
||||
@@ -2,15 +2,141 @@
|
||||
|
||||
from copy import deepcopy
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from doorbirdpy import DoorBirdScheduleEntry
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.doorbird.const import CONF_EVENTS
|
||||
from homeassistant.components.doorbird.const import (
|
||||
CONF_EVENTS,
|
||||
DEFAULT_DOORBELL_EVENT,
|
||||
DEFAULT_MOTION_EVENT,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import VALID_CONFIG
|
||||
from .conftest import DoorbirdMockerType
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def doorbird_favorites_with_stale() -> dict[str, dict[str, Any]]:
|
||||
"""Return favorites fixture with stale favorites from another HA instance.
|
||||
|
||||
Creates favorites where identifier "2" has the same event name as "0"
|
||||
(mydoorbird_doorbell) but points to a different HA instance URL.
|
||||
These stale favorites should be filtered out.
|
||||
"""
|
||||
return {
|
||||
"http": {
|
||||
"0": {
|
||||
"title": "Home Assistant (mydoorbird_doorbell)",
|
||||
"value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_doorbell?token=test-token",
|
||||
},
|
||||
# Stale favorite from a different HA instance - should be filtered out
|
||||
"2": {
|
||||
"title": "Home Assistant (mydoorbird_doorbell)",
|
||||
"value": "http://old-ha-instance:8123/api/doorbird/mydoorbird_doorbell?token=old-token",
|
||||
},
|
||||
"5": {
|
||||
"title": "Home Assistant (mydoorbird_motion)",
|
||||
"value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=test-token",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def doorbird_schedule_with_stale() -> list[DoorBirdScheduleEntry]:
|
||||
"""Return schedule fixture with outputs referencing stale favorites.
|
||||
|
||||
Both param "0" and "2" map to doorbell input, but "2" is a stale favorite.
|
||||
"""
|
||||
schedule_data = [
|
||||
{
|
||||
"input": "doorbell",
|
||||
"param": "1",
|
||||
"output": [
|
||||
{
|
||||
"event": "http",
|
||||
"param": "0",
|
||||
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
|
||||
},
|
||||
{
|
||||
"event": "http",
|
||||
"param": "2",
|
||||
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"input": "motion",
|
||||
"param": "",
|
||||
"output": [
|
||||
{
|
||||
"event": "http",
|
||||
"param": "5",
|
||||
"schedule": {"weekdays": [{"to": "107999", "from": "108000"}]},
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
return DoorBirdScheduleEntry.parse_all(schedule_data)
|
||||
|
||||
|
||||
async def test_stale_favorites_filtered_by_url(
|
||||
hass: HomeAssistant,
|
||||
doorbird_mocker: DoorbirdMockerType,
|
||||
doorbird_favorites_with_stale: dict[str, dict[str, Any]],
|
||||
doorbird_schedule_with_stale: list[DoorBirdScheduleEntry],
|
||||
) -> None:
|
||||
"""Test that stale favorites from other HA instances are filtered out."""
|
||||
await doorbird_mocker(
|
||||
favorites=doorbird_favorites_with_stale,
|
||||
schedule=doorbird_schedule_with_stale,
|
||||
)
|
||||
# Should have 2 event entities - stale favorite "2" is filtered out
|
||||
# because its URL doesn't match the current HA instance
|
||||
event_entities = hass.states.async_all("event")
|
||||
assert len(event_entities) == 2
|
||||
|
||||
|
||||
async def test_custom_url_used_for_favorites(
|
||||
hass: HomeAssistant,
|
||||
doorbird_mocker: DoorbirdMockerType,
|
||||
) -> None:
|
||||
"""Test that custom URL override is used instead of get_url."""
|
||||
custom_url = "https://my-custom-url.example.com:8443"
|
||||
favorites = {
|
||||
"http": {
|
||||
"1": {
|
||||
"title": "Home Assistant (mydoorbird_doorbell)",
|
||||
"value": f"{custom_url}/api/doorbird/mydoorbird_doorbell?token=test-token",
|
||||
},
|
||||
"2": {
|
||||
"title": "Home Assistant (mydoorbird_motion)",
|
||||
"value": f"{custom_url}/api/doorbird/mydoorbird_motion?token=test-token",
|
||||
},
|
||||
}
|
||||
}
|
||||
config_with_custom_url = {
|
||||
**VALID_CONFIG,
|
||||
"hass_url_override": custom_url,
|
||||
}
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id="1CCAE3AAAAAA",
|
||||
data=config_with_custom_url,
|
||||
options={CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]},
|
||||
)
|
||||
await doorbird_mocker(entry=entry, favorites=favorites)
|
||||
|
||||
# Should have 2 event entities using the custom URL
|
||||
event_entities = hass.states.async_all("event")
|
||||
assert len(event_entities) == 2
|
||||
|
||||
|
||||
async def test_no_configured_events(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -16,6 +16,7 @@ TEST_INSTALLS: Final = (
|
||||
"h032585", # VisionProWifi: no preset modes for TCS, zoneId=systemId
|
||||
"h099625", # RoundThermostat
|
||||
"h139906", # zone with null schedule
|
||||
"h157546", # tcs with long 8-digit system_id
|
||||
"sys_004", # RoundModulation
|
||||
)
|
||||
# "botched", # as default: but with activeFaults, ghost zones & unknown types
|
||||
|
||||
@@ -0,0 +1,67 @@
|
||||
{
|
||||
"dailySchedules": [
|
||||
{
|
||||
"dayOfWeek": "Monday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Tuesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Wednesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Thursday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Friday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Saturday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Sunday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 5.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
tests/components/evohome/fixtures/h157546/schedule_10090506.json (new file)
@@ -0,0 +1,151 @@
|
||||
{
|
||||
"dailySchedules": [
|
||||
{
|
||||
"dayOfWeek": "Monday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "21:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Tuesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Wednesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "21:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Thursday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "21:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Friday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "22:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Saturday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "01:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "09:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "11:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "22:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Sunday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "01:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "09:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "11:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "21:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
{
|
||||
"dailySchedules": [
|
||||
{
|
||||
"dayOfWeek": "Monday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Tuesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Wednesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Thursday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Friday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Saturday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Sunday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
tests/components/evohome/fixtures/h157546/schedule_10090508.json (new file)
@@ -0,0 +1,123 @@
|
||||
{
|
||||
"dailySchedules": [
|
||||
{
|
||||
"dayOfWeek": "Monday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Tuesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Wednesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Thursday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "09:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Friday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 18.0,
|
||||
"timeOfDay": "07:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "09:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Saturday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Sunday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 15.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
tests/components/evohome/fixtures/h157546/schedule_10090509.json (new file)
@@ -0,0 +1,115 @@
|
||||
{
|
||||
"dailySchedules": [
|
||||
{
|
||||
"dayOfWeek": "Monday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "23:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Tuesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "23:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Wednesday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "17:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "23:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Thursday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "17:30:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "23:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Friday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "08:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "17:30:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Saturday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "11:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"dayOfWeek": "Sunday",
|
||||
"switchpoints": [
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "00:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 20.0,
|
||||
"timeOfDay": "11:00:00"
|
||||
},
|
||||
{
|
||||
"heatSetpoint": 19.0,
|
||||
"timeOfDay": "23:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
{
|
||||
"locationId": "7647411",
|
||||
"gateways": [
|
||||
{
|
||||
"gatewayId": "7539089",
|
||||
"temperatureControlSystems": [
|
||||
{
|
||||
"systemId": "10090510",
|
||||
"zones": [
|
||||
{
|
||||
"zoneId": "10090505",
|
||||
"temperatureStatus": {
|
||||
"temperature": 15.5,
|
||||
"isAvailable": true
|
||||
},
|
||||
"activeFaults": [],
|
||||
"setpointStatus": {
|
||||
"targetHeatTemperature": 5.0,
|
||||
"setpointMode": "FollowSchedule"
|
||||
},
|
||||
"name": "Ba******"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090506",
|
||||
"temperatureStatus": {
|
||||
"temperature": 18.0,
|
||||
"isAvailable": true
|
||||
},
|
||||
"activeFaults": [],
|
||||
"setpointStatus": {
|
||||
"targetHeatTemperature": 18.0,
|
||||
"setpointMode": "FollowSchedule"
|
||||
},
|
||||
"name": "Sl********"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090507",
|
||||
"temperatureStatus": {
|
||||
"temperature": 16.0,
|
||||
"isAvailable": true
|
||||
},
|
||||
"activeFaults": [],
|
||||
"setpointStatus": {
|
||||
"targetHeatTemperature": 15.0,
|
||||
"setpointMode": "FollowSchedule"
|
||||
},
|
||||
"name": "Ka*********"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090508",
|
||||
"temperatureStatus": {
|
||||
"temperature": 17.0,
|
||||
"isAvailable": true
|
||||
},
|
||||
"activeFaults": [],
|
||||
"setpointStatus": {
|
||||
"targetHeatTemperature": 15.0,
|
||||
"setpointMode": "FollowSchedule"
|
||||
},
|
||||
"name": "Ka*****"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090509",
|
||||
"temperatureStatus": {
|
||||
"temperature": 19.0,
|
||||
"isAvailable": true
|
||||
},
|
||||
"activeFaults": [],
|
||||
"setpointStatus": {
|
||||
"targetHeatTemperature": 19.0,
|
||||
"setpointMode": "FollowSchedule"
|
||||
},
|
||||
"name": "Wo*******"
|
||||
}
|
||||
],
|
||||
"activeFaults": [],
|
||||
"systemModeStatus": {
|
||||
"mode": "Auto",
|
||||
"isPermanent": true
|
||||
}
|
||||
}
|
||||
],
|
||||
"activeFaults": []
|
||||
}
|
||||
]
|
||||
}
|
||||
tests/components/evohome/fixtures/h157546/user_account.json (new file)
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"userId": "6297358",
|
||||
"username": "nobody@nowhere.com",
|
||||
"firstname": "Da***",
|
||||
"lastname": "Sp*****",
|
||||
"streetAddress": "********** **",
|
||||
"city": "**********",
|
||||
"postcode": "******",
|
||||
"country": "Netherlands",
|
||||
"language": "nlNL"
|
||||
}
|
||||
tests/components/evohome/fixtures/h157546/user_locations.json (new file)
@@ -0,0 +1,224 @@
|
||||
[
|
||||
{
|
||||
"locationInfo": {
|
||||
"locationId": "7647411",
|
||||
"name": "Kl********",
|
||||
"streetAddress": "********** **",
|
||||
"city": "**********",
|
||||
"country": "Netherlands",
|
||||
"postcode": "******",
|
||||
"locationType": "Residential",
|
||||
"useDaylightSaveSwitching": true,
|
||||
"timeZone": {
|
||||
"timeZoneId": "WEuropeStandardTime",
|
||||
"displayName": "(UTC+01:00) Amsterdam, Berlijn, Bern, Rome, Stockholm, Wenen",
|
||||
"offsetMinutes": 60,
|
||||
"currentOffsetMinutes": 60,
|
||||
"supportsDaylightSaving": true
|
||||
},
|
||||
"locationOwner": {
|
||||
"userId": "6297358",
|
||||
"username": "nobody@nowhere.com",
|
||||
"firstname": "Da***",
|
||||
"lastname": "Sp*****"
|
||||
}
|
||||
},
|
||||
"gateways": [
|
||||
{
|
||||
"gatewayInfo": {
|
||||
"gatewayId": "7539089",
|
||||
"mac": "************",
|
||||
"crc": "****",
|
||||
"isWiFi": false
|
||||
},
|
||||
"temperatureControlSystems": [
|
||||
{
|
||||
"systemId": "10090510",
|
||||
"modelType": "EvoTouch",
|
||||
"zones": [
|
||||
{
|
||||
"zoneId": "10090505",
|
||||
"modelType": "HeatingZone",
|
||||
"setpointCapabilities": {
|
||||
"maxHeatSetpoint": 35.0,
|
||||
"minHeatSetpoint": 5.0,
|
||||
"valueResolution": 0.5,
|
||||
"canControlHeat": true,
|
||||
"canControlCool": false,
|
||||
"allowedSetpointModes": [
|
||||
"PermanentOverride",
|
||||
"FollowSchedule",
|
||||
"TemporaryOverride"
|
||||
],
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "00:10:00"
|
||||
},
|
||||
"scheduleCapabilities": {
|
||||
"maxSwitchpointsPerDay": 6,
|
||||
"minSwitchpointsPerDay": 1,
|
||||
"timingResolution": "00:10:00",
|
||||
"setpointValueResolution": 0.5
|
||||
},
|
||||
"name": "Ba******",
|
||||
"zoneType": "UnderfloorHeating"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090506",
|
||||
"modelType": "HeatingZone",
|
||||
"setpointCapabilities": {
|
||||
"maxHeatSetpoint": 35.0,
|
||||
"minHeatSetpoint": 5.0,
|
||||
"valueResolution": 0.5,
|
||||
"canControlHeat": true,
|
||||
"canControlCool": false,
|
||||
"allowedSetpointModes": [
|
||||
"PermanentOverride",
|
||||
"FollowSchedule",
|
||||
"TemporaryOverride"
|
||||
],
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "00:10:00"
|
||||
},
|
||||
"scheduleCapabilities": {
|
||||
"maxSwitchpointsPerDay": 6,
|
||||
"minSwitchpointsPerDay": 1,
|
||||
"timingResolution": "00:10:00",
|
||||
"setpointValueResolution": 0.5
|
||||
},
|
||||
"name": "Sl********",
|
||||
"zoneType": "UnderfloorHeating"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090507",
|
||||
"modelType": "HeatingZone",
|
||||
"setpointCapabilities": {
|
||||
"maxHeatSetpoint": 35.0,
|
||||
"minHeatSetpoint": 5.0,
|
||||
"valueResolution": 0.5,
|
||||
"canControlHeat": true,
|
||||
"canControlCool": false,
|
||||
"allowedSetpointModes": [
|
||||
"PermanentOverride",
|
||||
"FollowSchedule",
|
||||
"TemporaryOverride"
|
||||
],
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "00:10:00"
|
||||
},
|
||||
"scheduleCapabilities": {
|
||||
"maxSwitchpointsPerDay": 6,
|
||||
"minSwitchpointsPerDay": 1,
|
||||
"timingResolution": "00:10:00",
|
||||
"setpointValueResolution": 0.5
|
||||
},
|
||||
"name": "Ka*********",
|
||||
"zoneType": "UnderfloorHeating"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090508",
|
||||
"modelType": "HeatingZone",
|
||||
"setpointCapabilities": {
|
||||
"maxHeatSetpoint": 35.0,
|
||||
"minHeatSetpoint": 5.0,
|
||||
"valueResolution": 0.5,
|
||||
"canControlHeat": true,
|
||||
"canControlCool": false,
|
||||
"allowedSetpointModes": [
|
||||
"PermanentOverride",
|
||||
"FollowSchedule",
|
||||
"TemporaryOverride"
|
||||
],
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "00:10:00"
|
||||
},
|
||||
"scheduleCapabilities": {
|
||||
"maxSwitchpointsPerDay": 6,
|
||||
"minSwitchpointsPerDay": 1,
|
||||
"timingResolution": "00:10:00",
|
||||
"setpointValueResolution": 0.5
|
||||
},
|
||||
"name": "Ka*****",
|
||||
"zoneType": "UnderfloorHeating"
|
||||
},
|
||||
{
|
||||
"zoneId": "10090509",
|
||||
"modelType": "HeatingZone",
|
||||
"setpointCapabilities": {
|
||||
"maxHeatSetpoint": 35.0,
|
||||
"minHeatSetpoint": 5.0,
|
||||
"valueResolution": 0.5,
|
||||
"canControlHeat": true,
|
||||
"canControlCool": false,
|
||||
"allowedSetpointModes": [
|
||||
"PermanentOverride",
|
||||
"FollowSchedule",
|
||||
"TemporaryOverride"
|
||||
],
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "00:10:00"
|
||||
},
|
||||
"scheduleCapabilities": {
|
||||
"maxSwitchpointsPerDay": 6,
|
||||
"minSwitchpointsPerDay": 1,
|
||||
"timingResolution": "00:10:00",
|
||||
"setpointValueResolution": 0.5
|
||||
},
|
||||
"name": "Wo*******",
|
||||
"zoneType": "ZoneValves"
|
||||
}
|
||||
],
|
||||
"allowedSystemModes": [
|
||||
{
|
||||
"systemMode": "Auto",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": false
|
||||
},
|
||||
{
|
||||
"systemMode": "AutoWithEco",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": true,
|
||||
"maxDuration": "1.00:00:00",
|
||||
"timingResolution": "01:00:00",
|
||||
"timingMode": "Duration"
|
||||
},
|
||||
{
|
||||
"systemMode": "AutoWithReset",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": false
|
||||
},
|
||||
{
|
||||
"systemMode": "Away",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": true,
|
||||
"maxDuration": "99.00:00:00",
|
||||
"timingResolution": "1.00:00:00",
|
||||
"timingMode": "Period"
|
||||
},
|
||||
{
|
||||
"systemMode": "DayOff",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": true,
|
||||
"maxDuration": "99.00:00:00",
|
||||
"timingResolution": "1.00:00:00",
|
||||
"timingMode": "Period"
|
||||
},
|
||||
{
|
||||
"systemMode": "HeatingOff",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": false
|
||||
},
|
||||
{
|
||||
"systemMode": "Custom",
|
||||
"canBePermanent": true,
|
||||
"canBeTemporary": true,
|
||||
"maxDuration": "99.00:00:00",
|
||||
"timingResolution": "1.00:00:00",
|
||||
"timingMode": "Period"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@@ -39,6 +39,16 @@
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_set_hvac_mode[h157546]
|
||||
list([
|
||||
tuple(
|
||||
<SystemMode.HEATING_OFF: 'HeatingOff'>,
|
||||
),
|
||||
tuple(
|
||||
<SystemMode.AUTO: 'Auto'>,
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_set_hvac_mode[minimal]
|
||||
list([
|
||||
tuple(
|
||||
@@ -87,6 +97,13 @@
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_turn_off[h157546]
|
||||
list([
|
||||
tuple(
|
||||
<SystemMode.HEATING_OFF: 'HeatingOff'>,
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_turn_off[minimal]
|
||||
list([
|
||||
tuple(
|
||||
@@ -129,6 +146,13 @@
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_turn_on[h157546]
|
||||
list([
|
||||
tuple(
|
||||
<SystemMode.AUTO: 'Auto'>,
|
||||
),
|
||||
])
|
||||
# ---
|
||||
# name: test_ctl_turn_on[minimal]
|
||||
list([
|
||||
tuple(
|
||||
@@ -1272,6 +1296,280 @@
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.ba-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 15.5,
|
||||
'friendly_name': 'Ba******',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'max_temp': 35.0,
|
||||
'min_temp': 5.0,
|
||||
'preset_mode': 'none',
|
||||
'preset_modes': list([
|
||||
'none',
|
||||
'temporary',
|
||||
'permanent',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeFaults': tuple(
|
||||
),
|
||||
'setpoint_status': dict({
|
||||
'setpoint_mode': 'FollowSchedule',
|
||||
'target_heat_temperature': 5.0,
|
||||
}),
|
||||
'setpoints': dict({
|
||||
'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'next_sp_temp': 18.6,
|
||||
'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'this_sp_temp': 16.0,
|
||||
}),
|
||||
'temperature_status': dict({
|
||||
'is_available': True,
|
||||
'temperature': 15.5,
|
||||
}),
|
||||
'zone_id': '10090505',
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 401>,
|
||||
'temperature': 5.0,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.ba',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.ka-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 16.0,
|
||||
'friendly_name': 'Ka*********',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'max_temp': 35.0,
|
||||
'min_temp': 5.0,
|
||||
'preset_mode': 'none',
|
||||
'preset_modes': list([
|
||||
'none',
|
||||
'temporary',
|
||||
'permanent',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeFaults': tuple(
|
||||
),
|
||||
'setpoint_status': dict({
|
||||
'setpoint_mode': 'FollowSchedule',
|
||||
'target_heat_temperature': 15.0,
|
||||
}),
|
||||
'setpoints': dict({
|
||||
'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'next_sp_temp': 18.6,
|
||||
'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'this_sp_temp': 16.0,
|
||||
}),
|
||||
'temperature_status': dict({
|
||||
'is_available': True,
|
||||
'temperature': 16.0,
|
||||
}),
|
||||
'zone_id': '10090507',
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 401>,
|
||||
'temperature': 15.0,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.ka',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.ka_2-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 17.0,
|
||||
'friendly_name': 'Ka*****',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'max_temp': 35.0,
|
||||
'min_temp': 5.0,
|
||||
'preset_mode': 'none',
|
||||
'preset_modes': list([
|
||||
'none',
|
||||
'temporary',
|
||||
'permanent',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeFaults': tuple(
|
||||
),
|
||||
'setpoint_status': dict({
|
||||
'setpoint_mode': 'FollowSchedule',
|
||||
'target_heat_temperature': 15.0,
|
||||
}),
|
||||
'setpoints': dict({
|
||||
'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'next_sp_temp': 18.6,
|
||||
'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'this_sp_temp': 16.0,
|
||||
}),
|
||||
'temperature_status': dict({
|
||||
'is_available': True,
|
||||
'temperature': 17.0,
|
||||
}),
|
||||
'zone_id': '10090508',
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 401>,
|
||||
'temperature': 15.0,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.ka_2',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.kl-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 17.1,
|
||||
'friendly_name': 'Kl********',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'icon': 'mdi:thermostat',
|
||||
'max_temp': 35,
|
||||
'min_temp': 7,
|
||||
'preset_mode': None,
|
||||
'preset_modes': list([
|
||||
'eco',
|
||||
'Reset',
|
||||
'away',
|
||||
'home',
|
||||
'Custom',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeSystemFaults': tuple(
|
||||
),
|
||||
'system_id': '10090510',
|
||||
'system_mode_status': dict({
|
||||
'is_permanent': True,
|
||||
'mode': 'Auto',
|
||||
}),
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 400>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.kl',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.sl-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 18.0,
|
||||
'friendly_name': 'Sl********',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'max_temp': 35.0,
|
||||
'min_temp': 5.0,
|
||||
'preset_mode': 'none',
|
||||
'preset_modes': list([
|
||||
'none',
|
||||
'temporary',
|
||||
'permanent',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeFaults': tuple(
|
||||
),
|
||||
'setpoint_status': dict({
|
||||
'setpoint_mode': 'FollowSchedule',
|
||||
'target_heat_temperature': 18.0,
|
||||
}),
|
||||
'setpoints': dict({
|
||||
'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'next_sp_temp': 18.6,
|
||||
'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'this_sp_temp': 16.0,
|
||||
}),
|
||||
'temperature_status': dict({
|
||||
'is_available': True,
|
||||
'temperature': 18.0,
|
||||
}),
|
||||
'zone_id': '10090506',
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 401>,
|
||||
'temperature': 18.0,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.sl',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_platform[h157546][climate.wo-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 19.0,
|
||||
'friendly_name': 'Wo*******',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
]),
|
||||
'max_temp': 35.0,
|
||||
'min_temp': 5.0,
|
||||
'preset_mode': 'none',
|
||||
'preset_modes': list([
|
||||
'none',
|
||||
'temporary',
|
||||
'permanent',
|
||||
]),
|
||||
'status': dict({
|
||||
'activeFaults': tuple(
|
||||
),
|
||||
'setpoint_status': dict({
|
||||
'setpoint_mode': 'FollowSchedule',
|
||||
'target_heat_temperature': 19.0,
|
||||
}),
|
||||
'setpoints': dict({
|
||||
'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'next_sp_temp': 18.6,
|
||||
'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')),
|
||||
'this_sp_temp': 16.0,
|
||||
}),
|
||||
'temperature_status': dict({
|
||||
'is_available': True,
|
||||
'temperature': 19.0,
|
||||
}),
|
||||
'zone_id': '10090509',
|
||||
}),
|
||||
'supported_features': <ClimateEntityFeature: 401>,
|
||||
'temperature': 19.0,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.wo',
|
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'heat',
})
# ---
# name: test_setup_platform[minimal][climate.main_room-state]
StateSnapshot({
'attributes': ReadOnlyDict({
@@ -1473,6 +1771,13 @@
),
])
# ---
# name: test_zone_set_hvac_mode[h157546]
list([
tuple(
5.0,
),
])
# ---
# name: test_zone_set_hvac_mode[minimal]
list([
tuple(
@@ -1539,6 +1844,19 @@
}),
])
# ---
# name: test_zone_set_preset_mode[h157546]
list([
tuple(
5.0,
),
tuple(
5.0,
),
dict({
'until': HAFakeDatetime(2024, 7, 10, 20, 10, tzinfo=datetime.timezone.utc),
}),
])
# ---
# name: test_zone_set_preset_mode[minimal]
list([
tuple(
@@ -1593,6 +1911,13 @@
}),
])
# ---
# name: test_zone_set_temperature[h157546]
list([
dict({
'until': HAFakeDatetime(2024, 7, 10, 20, 10, tzinfo=datetime.timezone.utc),
}),
])
# ---
# name: test_zone_set_temperature[minimal]
list([
dict({
@@ -1635,6 +1960,13 @@
),
])
# ---
# name: test_zone_turn_off[h157546]
list([
tuple(
5.0,
),
])
# ---
# name: test_zone_turn_off[minimal]
list([
tuple(

@@ -14,6 +14,9 @@
# name: test_setup[h139906]
dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override'])
# ---
# name: test_setup[h157546]
dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override'])
# ---
# name: test_setup[minimal]
dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override'])
# ---

@@ -695,3 +695,199 @@ async def test_websocket_backup_timeout_handling(

assert not msg["success"]
assert msg["error"]["code"] == "unknown_error"


async def test_websocket_subscribe_feature(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test subscribing to a specific preview feature."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()

client = await hass_ws_client(hass)

await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
msg = await client.receive_json()

assert msg["success"]
assert msg["result"] is None

# Initial state is sent as event
event_msg = await client.receive_json()
assert event_msg["type"] == "event"
assert event_msg["event"] == {
"preview_feature": "special_repair",
"domain": "kitchen_sink",
"enabled": False,
"is_built_in": True,
"feedback_url": ANY,
"learn_more_url": ANY,
"report_issue_url": ANY,
}


async def test_websocket_subscribe_feature_receives_updates(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test that subscription receives updates when feature is toggled."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()

client = await hass_ws_client(hass)

await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
subscribe_msg = await client.receive_json()
assert subscribe_msg["success"]
subscription_id = subscribe_msg["id"]

# Initial state event
initial_event_msg = await client.receive_json()
assert initial_event_msg["id"] == subscription_id
assert initial_event_msg["type"] == "event"
assert initial_event_msg["event"]["enabled"] is False

await client.send_json_auto_id(
{
"type": "labs/update",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
"enabled": True,
}
)

# Update event arrives before the update result
event_msg = await client.receive_json()
assert event_msg["id"] == subscription_id
assert event_msg["type"] == "event"
assert event_msg["event"] == {
"preview_feature": "special_repair",
"domain": "kitchen_sink",
"enabled": True,
"is_built_in": True,
"feedback_url": ANY,
"learn_more_url": ANY,
"report_issue_url": ANY,
}

update_msg = await client.receive_json()
assert update_msg["success"]


async def test_websocket_subscribe_nonexistent_feature(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test subscribing to a preview feature that doesn't exist."""
assert await async_setup(hass, {})
await hass.async_block_till_done()

client = await hass_ws_client(hass)

await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "nonexistent",
"preview_feature": "feature",
}
)
msg = await client.receive_json()

assert not msg["success"]
assert msg["error"]["code"] == "not_found"
assert "not found" in msg["error"]["message"].lower()


async def test_websocket_subscribe_does_not_require_admin(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_admin_user: MockUser,
) -> None:
"""Test that subscribe does not require admin privileges."""
hass_admin_user.groups = []

hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()

client = await hass_ws_client(hass)

await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
msg = await client.receive_json()

assert msg["success"]

# Consume initial state event
await client.receive_json()


async def test_websocket_subscribe_only_receives_subscribed_feature_updates(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test that subscription only receives updates for the subscribed feature."""
hass.config.components.add("kitchen_sink")
assert await async_setup(hass, {})
await hass.async_block_till_done()

client = await hass_ws_client(hass)

await client.send_json_auto_id(
{
"type": "labs/subscribe",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
}
)
subscribe_msg = await client.receive_json()
assert subscribe_msg["success"]

# Consume initial state event
await client.receive_json()

# Fire an event for a different feature
hass.bus.async_fire(
EVENT_LABS_UPDATED,
{"domain": "other_domain", "preview_feature": "other_feature", "enabled": True},
)
await hass.async_block_till_done()

await client.send_json_auto_id(
{
"type": "labs/update",
"domain": "kitchen_sink",
"preview_feature": "special_repair",
"enabled": True,
}
)

# Event message arrives before the update result
# Should only receive event for subscribed feature, not the other one
event_msg = await client.receive_json()
assert event_msg["type"] == "event"
assert event_msg["event"]["domain"] == "kitchen_sink"
assert event_msg["event"]["preview_feature"] == "special_repair"

update_msg = await client.receive_json()
assert update_msg["success"]

@@ -4,8 +4,9 @@ from __future__ import annotations

from unittest.mock import AsyncMock, MagicMock, patch

from homeassistant.components.rituals_perfume_genie.const import ACCOUNT_HASH, DOMAIN
from homeassistant.components.rituals_perfume_genie.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry, load_json_object_fixture
@@ -17,7 +18,11 @@ def mock_config_entry(unique_id: str, entry_id: str = "an_entry_id") -> MockConf
domain=DOMAIN,
title="name@example.com",
unique_id=unique_id,
data={ACCOUNT_HASH: "an_account_hash"},
data={
CONF_EMAIL: "test@rituals.com",
CONF_PASSWORD: "test-password",
},
version=2,
entry_id=entry_id,
)

@@ -90,13 +95,15 @@ async def init_integration(
"""Initialize the Rituals Perfume Genie integration with the given Config Entry and Diffuser list."""
mock_config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.rituals_perfume_genie.Account.get_devices",
return_value=mock_diffusers,
):
"homeassistant.components.rituals_perfume_genie.Account"
) as mock_account_cls:
mock_account = mock_account_cls.return_value
mock_account.authenticate = AsyncMock()
mock_account.get_devices = AsyncMock(return_value=mock_diffusers)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

assert mock_config_entry.state is ConfigEntryState.LOADED
assert mock_config_entry.entry_id in hass.data[DOMAIN]
assert hass.data[DOMAIN]

await hass.async_block_till_done()
64
tests/components/rituals_perfume_genie/conftest.py
Normal file
@@ -0,0 +1,64 @@
"""Fixtures for Rituals Perfume Genie tests."""

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest

from homeassistant.components.rituals_perfume_genie import ACCOUNT_HASH, DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD

from .const import TEST_EMAIL, TEST_PASSWORD

from tests.common import MockConfigEntry


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
"""Override async_setup_entry."""
with patch(
"homeassistant.components.rituals_perfume_genie.async_setup_entry",
return_value=True,
) as mock:
yield mock


@pytest.fixture
def mock_rituals_account() -> Generator[AsyncMock]:
"""Mock Rituals Account."""
with (
patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account",
autospec=True,
) as mock_account_cls,
patch(
"homeassistant.components.rituals_perfume_genie.Account",
new=mock_account_cls,
),
):
mock_account = mock_account_cls.return_value
yield mock_account


@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
"""Mock Rituals Account."""
return MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_EMAIL,
data={CONF_EMAIL: TEST_EMAIL, CONF_PASSWORD: TEST_PASSWORD},
title=TEST_EMAIL,
version=2,
)


@pytest.fixture
def old_mock_config_entry() -> MockConfigEntry:
"""Mock Rituals Account."""
return MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_EMAIL,
data={ACCOUNT_HASH: "old_hash_should_be_removed"},
title=TEST_EMAIL,
version=1,
)
4
tests/components/rituals_perfume_genie/const.py
Normal file
@@ -0,0 +1,4 @@
"""Constants for rituals_perfume_genie tests."""

TEST_EMAIL = "test@rituals.com"
TEST_PASSWORD = "test-password"
@@ -1,126 +1,213 @@
"""Test the Rituals Perfume Genie config flow."""

from http import HTTPStatus
from unittest.mock import AsyncMock, MagicMock, patch
from unittest.mock import AsyncMock

from aiohttp import ClientResponseError
from aiohttp import ClientError
from pyrituals import AuthenticationException
import pytest

from homeassistant import config_entries
from homeassistant.components.rituals_perfume_genie.const import ACCOUNT_HASH, DOMAIN
from homeassistant.components.rituals_perfume_genie.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

TEST_EMAIL = "rituals@example.com"
VALID_PASSWORD = "passw0rd"
WRONG_PASSWORD = "wrong-passw0rd"
from .const import TEST_EMAIL, TEST_PASSWORD

from tests.common import MockConfigEntry


def _mock_account(*_):
account = MagicMock()
account.authenticate = AsyncMock()
account.account_hash = "any"
account.email = TEST_EMAIL
return account


async def test_form(hass: HomeAssistant) -> None:
"""Test we get the form."""
async def test_user_flow_success(
hass: HomeAssistant, mock_rituals_account: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
"""Test successful user flow setup."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] is None
assert result["step_id"] == "user"
assert result["errors"] == {}

with (
patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account",
side_effect=_mock_account,
),
patch(
"homeassistant.components.rituals_perfume_genie.async_setup_entry",
return_value=True,
) as mock_setup_entry,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: VALID_PASSWORD,
},
)
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: TEST_PASSWORD,
},
)

assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == TEST_EMAIL
assert isinstance(result2["data"][ACCOUNT_HASH], str)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == TEST_EMAIL
assert result["data"] == {
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: TEST_PASSWORD,
}
assert result["result"].unique_id == TEST_EMAIL
assert len(mock_setup_entry.mock_calls) == 1


async def test_form_invalid_auth(hass: HomeAssistant) -> None:
"""Test we handle invalid auth."""
@pytest.mark.parametrize(
("exception", "error"),
[
(AuthenticationException, "invalid_auth"),
(ClientError, "cannot_connect"),
],
)
async def test_user_flow_errors(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
mock_setup_entry: AsyncMock,
exception: Exception,
error: str,
) -> None:
"""Test user flow with different errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
DOMAIN, context={"source": SOURCE_USER}
)
mock_rituals_account.authenticate.side_effect = exception

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: TEST_PASSWORD,
},
)

with patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account.authenticate",
side_effect=AuthenticationException,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: WRONG_PASSWORD,
},
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": error}

assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "invalid_auth"}
mock_rituals_account.authenticate.side_effect = None


async def test_form_auth_exception(hass: HomeAssistant) -> None:
"""Test we handle auth exception."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: TEST_PASSWORD,
},
)

with patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account.authenticate",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: VALID_PASSWORD,
},
)

assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "unknown"}
assert result["type"] is FlowResultType.CREATE_ENTRY


async def test_form_cannot_connect(hass: HomeAssistant) -> None:
"""Test we handle cannot connect error."""
async def test_duplicate_entry(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test user flow with invalid credentials."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
DOMAIN, context={"source": SOURCE_USER}
)

with patch(
"homeassistant.components.rituals_perfume_genie.config_flow.Account.authenticate",
side_effect=ClientResponseError(
None, None, status=HTTPStatus.INTERNAL_SERVER_ERROR
),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: VALID_PASSWORD,
},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: TEST_PASSWORD,
},
)

assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "cannot_connect"}
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"


async def test_reauth_flow_success(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
mock_setup_entry: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful reauth flow (updating credentials)."""
mock_config_entry.add_to_hass(hass)

result = await mock_config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "new_correct_password"},
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"

assert mock_config_entry.data[CONF_PASSWORD] == "new_correct_password"
assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.parametrize(
("exception", "error"),
[
(AuthenticationException, "invalid_auth"),
(ClientError, "cannot_connect"),
],
)
async def test_reauth_flow_errors(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
mock_setup_entry: AsyncMock,
mock_config_entry: MockConfigEntry,
exception: Exception,
error: str,
) -> None:
"""Test reauth flow with different errors."""
mock_config_entry.add_to_hass(hass)
result = await mock_config_entry.start_reauth_flow(hass)

mock_rituals_account.authenticate.side_effect = exception

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "new_correct_password"},
)

assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": error}

mock_rituals_account.authenticate.side_effect = None

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_PASSWORD: "new_correct_password",
},
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert mock_config_entry.data[CONF_PASSWORD] == "new_correct_password"


async def test_reauth_migrated_entry(
hass: HomeAssistant, mock_rituals_account: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
"""Test successful reauth flow (updating credentials)."""
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_EMAIL,
data={},
title=TEST_EMAIL,
version=2,
)
mock_config_entry.add_to_hass(hass)

result = await mock_config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "new_correct_password"},
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"

assert mock_config_entry.data == {
CONF_EMAIL: TEST_EMAIL,
CONF_PASSWORD: "new_correct_password",
}
assert len(mock_setup_entry.mock_calls) == 1
@@ -1,10 +1,10 @@
"""Tests for the Rituals Perfume Genie integration."""

from unittest.mock import patch
from unittest.mock import AsyncMock

import aiohttp

from homeassistant.components.rituals_perfume_genie.const import DOMAIN
from homeassistant.components.rituals_perfume_genie.const import ACCOUNT_HASH, DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
@@ -16,17 +16,39 @@ from .common import (
mock_diffuser_v1_battery_cartridge,
)

from tests.common import MockConfigEntry

async def test_config_entry_not_ready(hass: HomeAssistant) -> None:

async def test_migration_v1_to_v2(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
old_mock_config_entry: MockConfigEntry,
) -> None:
"""Test migration from V1 (account_hash) to V2 (credentials)."""
old_mock_config_entry.add_to_hass(hass)

await hass.config_entries.async_setup(old_mock_config_entry.entry_id)
await hass.async_block_till_done()

assert old_mock_config_entry.version == 2
assert ACCOUNT_HASH not in old_mock_config_entry.data
assert old_mock_config_entry.state is ConfigEntryState.SETUP_ERROR
assert len(hass.config_entries.flow.async_progress()) == 1


async def test_config_entry_not_ready(
hass: HomeAssistant,
mock_rituals_account: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the Rituals configuration entry setup if connection to Rituals is missing."""
config_entry = mock_config_entry(unique_id="id_123_not_ready")
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.rituals_perfume_genie.Account.get_devices",
side_effect=aiohttp.ClientError,
):
await hass.config_entries.async_setup(config_entry.entry_id)
assert config_entry.state is ConfigEntryState.SETUP_RETRY
mock_config_entry.add_to_hass(hass)
mock_rituals_account.get_devices.side_effect = aiohttp.ClientError

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY


async def test_config_entry_unload(hass: HomeAssistant) -> None:
@@ -554,3 +554,30 @@ async def test_wall_display_screen_buttons(
blocking=True,
)
mock_rpc_device.wall_display_set_screen.assert_called_once_with(value=value)


async def test_rpc_remove_restart_button_for_sleeping_devices(
hass: HomeAssistant,
mock_rpc_device: Mock,
monkeypatch: pytest.MonkeyPatch,
device_registry: DeviceRegistry,
entity_registry: EntityRegistry,
) -> None:
"""Test RPC remove restart button for sleeping devices."""
config_entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True)
device_entry = register_device(device_registry, config_entry)
entity_id = register_entity(
hass,
BUTTON_DOMAIN,
"test_name_restart",
"reboot",
config_entry,
device_id=device_entry.id,
)

assert entity_registry.async_get(entity_id) is not None

await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()

assert entity_registry.async_get(entity_id) is None
@@ -9,11 +9,6 @@ SETUP_ENTRY_PATCHER = patch(
"homeassistant.components.starlink.async_setup_entry", return_value=True
)

STATUS_DATA_SUCCESS_PATCHER = patch(
"homeassistant.components.starlink.coordinator.status_data",
return_value=json.loads(load_fixture("status_data_success.json", "starlink")),
)

LOCATION_DATA_SUCCESS_PATCHER = patch(
"homeassistant.components.starlink.coordinator.location_data",
return_value=json.loads(load_fixture("location_data_success.json", "starlink")),
@@ -24,6 +19,12 @@ SLEEP_DATA_SUCCESS_PATCHER = patch(
return_value=json.loads(load_fixture("sleep_data_success.json", "starlink")),
)

STATUS_DATA_TARGET = "homeassistant.components.starlink.coordinator.status_data"
STATUS_DATA_FIXTURE = json.loads(load_fixture("status_data_success.json", "starlink"))
STATUS_DATA_SUCCESS_PATCHER = patch(
STATUS_DATA_TARGET, return_value=STATUS_DATA_FIXTURE
)

HISTORY_STATS_SUCCESS_PATCHER = patch(
"homeassistant.components.starlink.coordinator.history_stats",
return_value=json.loads(load_fixture("history_stats_success.json", "starlink")),
|
||||
"""Tests Starlink integration init/unload."""
|
||||
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from freezegun import freeze_time
|
||||
|
||||
from homeassistant.components.starlink.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_IP_ADDRESS
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .patchers import (
|
||||
HISTORY_STATS_SUCCESS_PATCHER,
|
||||
LOCATION_DATA_SUCCESS_PATCHER,
|
||||
SLEEP_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_FIXTURE,
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_TARGET,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry, mock_restore_cache_with_extra_data
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
async_fire_time_changed,
|
||||
mock_restore_cache_with_extra_data,
|
||||
)
|
||||
|
||||
|
||||
async def test_successful_entry(hass: HomeAssistant) -> None:
|
||||
@@ -25,9 +36,9 @@ async def test_successful_entry(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
with (
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
LOCATION_DATA_SUCCESS_PATCHER,
|
||||
SLEEP_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
HISTORY_STATS_SUCCESS_PATCHER,
|
||||
):
|
||||
entry.add_to_hass(hass)
|
||||
@@ -48,9 +59,9 @@ async def test_unload_entry(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
with (
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
LOCATION_DATA_SUCCESS_PATCHER,
|
||||
SLEEP_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
HISTORY_STATS_SUCCESS_PATCHER,
|
||||
):
|
||||
entry.add_to_hass(hass)
|
||||
@@ -65,7 +76,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None:
|
||||
|
||||
|
||||
async def test_restore_cache_with_accumulation(hass: HomeAssistant) -> None:
|
||||
"""Test configuring Starlink."""
|
||||
"""Test Starlink accumulation."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_IP_ADDRESS: "1.2.3.4:0000"},
|
||||
@@ -89,9 +100,9 @@ async def test_restore_cache_with_accumulation(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
with (
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
LOCATION_DATA_SUCCESS_PATCHER,
|
||||
SLEEP_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
HISTORY_STATS_SUCCESS_PATCHER,
|
||||
):
|
||||
entry.add_to_hass(hass)
|
||||
@@ -112,3 +123,62 @@ async def test_restore_cache_with_accumulation(hass: HomeAssistant) -> None:
|
||||
await entry.runtime_data.async_refresh()
|
||||
|
||||
assert hass.states.get(entity_id).state == str(1 + 0.01572462736977)
|
||||
|
||||
|
||||
async def test_last_restart_state(hass: HomeAssistant) -> None:
|
||||
"""Test Starlink last restart state."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_IP_ADDRESS: "1.2.3.4:0000"},
|
||||
)
|
||||
entity_id = "sensor.starlink_last_restart"
|
||||
utc_now = datetime.fromisoformat("2025-10-22T13:31:29+00:00")
|
||||
|
||||
with (
|
||||
LOCATION_DATA_SUCCESS_PATCHER,
|
||||
SLEEP_DATA_SUCCESS_PATCHER,
|
||||
STATUS_DATA_SUCCESS_PATCHER,
|
||||
HISTORY_STATS_SUCCESS_PATCHER,
|
||||
):
|
||||
with freeze_time(utc_now):
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(entity_id).state == "2025-10-13T06:09:11+00:00"
|
||||
|
||||
with patch.object(entry.runtime_data, "always_update", return_value=True):
|
||||
status_data = deepcopy(STATUS_DATA_FIXTURE)
|
||||
status_data[0]["uptime"] = 804144
|
||||
|
||||
with (
|
||||
freeze_time(utc_now + timedelta(seconds=5)),
|
||||
patch(STATUS_DATA_TARGET, return_value=status_data),
|
||||
):
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert hass.states.get(entity_id).state == "2025-10-13T06:09:11+00:00"
|
||||
|
||||
status_data[0]["uptime"] = 804134
|
||||
|
||||
with (
|
||||
freeze_time(utc_now + timedelta(seconds=10)),
|
||||
patch(STATUS_DATA_TARGET, return_value=status_data),
|
||||
):
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert hass.states.get(entity_id).state == "2025-10-13T06:09:11+00:00"
|
||||
|
||||
status_data[0]["uptime"] = 100
|
||||
|
||||
with (
|
||||
freeze_time(utc_now + timedelta(seconds=15)),
|
||||
patch(STATUS_DATA_TARGET, return_value=status_data),
|
||||
):
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=15))
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
assert hass.states.get(entity_id).state == "2025-10-22T13:30:04+00:00"
|
||||
|
||||
@@ -600,6 +600,270 @@ async def test_legacy_deprecation(
assert "platform: template" not in issue.translation_placeholders["config"]


@pytest.mark.parametrize(
("domain", "config", "strings_to_check"),
[
(
"light",
{
"light": {
"platform": "template",
"lights": {
"garage_light_template": {
"friendly_name": "Garage Light Template",
"min_mireds_template": 153,
"max_mireds_template": 500,
"turn_on": [],
"turn_off": [],
"set_temperature": [],
"set_hs": [],
"set_level": [],
}
},
},
},
[
"turn_on: []",
"turn_off: []",
"set_temperature: []",
"set_hs: []",
"set_level: []",
],
),
(
"switch",
{
"switch": {
"platform": "template",
"switches": {
"my_switch": {
"friendly_name": "Switch Template",
"turn_on": [],
"turn_off": [],
}
},
},
},
[
"turn_on: []",
"turn_off: []",
],
),
(
"light",
{
"light": [
{
"platform": "template",
"lights": {
"atrium_lichterkette": {
"unique_id": "atrium_lichterkette",
"friendly_name": "Atrium Lichterkette",
"value_template": "{{ states('input_boolean.atrium_lichterkette_power') }}",
"level_template": "{% if is_state('input_boolean.atrium_lichterkette_power', 'off') %}\n 0\n{% else %}\n {{ states('input_number.atrium_lichterkette_brightness') | int * (255 / state_attr('input_number.atrium_lichterkette_brightness', 'max') | int) }}\n{% endif %}",
"effect_list_template": "{{ state_attr('input_select.atrium_lichterkette_mode', 'options') }}",
"effect_template": "'{{ states('input_select.atrium_lichterkette_mode')}}'",
"turn_on": [
{
"service": "button.press",
"target": {
"entity_id": "button.esphome_web_28a814_lichterkette_on"
},
},
{
"service": "input_boolean.turn_on",
"target": {
"entity_id": "input_boolean.atrium_lichterkette_power"
},
},
],
"turn_off": [
{
"service": "button.press",
"target": {
"entity_id": "button.esphome_web_28a814_lichterkette_off"
},
},
{
"service": "input_boolean.turn_off",
"target": {
"entity_id": "input_boolean.atrium_lichterkette_power"
},
},
],
"set_level": [
{
"variables": {
"scaled": "{{ (brightness / (255 / state_attr('input_number.atrium_lichterkette_brightness', 'max'))) | round | int }}",
"diff": "{{ scaled | int - states('input_number.atrium_lichterkette_brightness') | int }}",
"direction": "{{ 'dim' if diff | int < 0 else 'bright' }}",
}
},
{
"repeat": {
"count": "{{ diff | int | abs }}",
"sequence": [
{
"service": "button.press",
"target": {
"entity_id": "button.esphome_web_28a814_lichterkette_{{ direction }}"
},
},
{"delay": {"milliseconds": 500}},
],
}
},
{
"service": "input_number.set_value",
"data": {
"value": "{{ scaled }}",
"entity_id": "input_number.atrium_lichterkette_brightness",
},
},
],
"set_effect": [
{
"service": "button.press",
"target": {
"entity_id": "button.esphome_web_28a814_lichterkette_{{ effect }}"
},
}
],
}
},
}
]
},
[
"scaled: ",
"diff: ",
"direction: ",
],
),
(
"cover",
{
"cover": [
{
"platform": "template",
"covers": {
"large_garage_door": {
"device_class": "garage",
"friendly_name": "Large Garage Door",
"value_template": "{% if is_state('binary_sensor.large_garage_door', 'off') %}\n closed\n{% elif is_state('timer.large_garage_opening_timer', 'active') %}\n opening\n{% elif is_state('timer.large_garage_closing_timer', 'active') %} \n closing\n{% elif is_state('binary_sensor.large_garage_door', 'on') %}\n open\n{% endif %}\n",
"open_cover": [
{
"condition": "state",
"entity_id": "binary_sensor.large_garage_door",
"state": "off",
},
{
"action": "switch.turn_on",
"target": {
"entity_id": "switch.garage_door_relay_1"
},
},
{
"action": "timer.start",
"entity_id": "timer.large_garage_opening_timer",
},
],
"close_cover": [
{
"condition": "state",
"entity_id": "binary_sensor.large_garage_door",
"state": "on",
},
{
"action": "switch.turn_on",
"target": {
"entity_id": "switch.garage_door_relay_1"
},
},
{
"action": "timer.start",
"entity_id": "timer.large_garage_closing_timer",
},
],
"stop_cover": [
{
"action": "switch.turn_on",
"target": {
"entity_id": "switch.garage_door_relay_1"
},
},
{
"action": "timer.cancel",
"entity_id": "timer.large_garage_opening_timer",
},
{
"action": "timer.cancel",
"entity_id": "timer.large_garage_closing_timer",
},
],
}
},
}
]
},
["device_class: garage"],
),
(
"binary_sensor",
{
"binary_sensor": {
"platform": "template",
"sensors": {
"motion_sensor": {
"friendly_name": "Motion Sensor",
"device_class": "motion",
"value_template": "{{ is_state('sensor.motion_detector', 'on') }}",
}
},
},
},
["device_class: motion"],
),
(
"sensor",
{
"sensor": {
"platform": "template",
"sensors": {
"some_sensor": {
"friendly_name": "Sensor",
"device_class": "timestamp",
"value_template": "{{ now().isoformat() }}",
}
},
},
},
["device_class: timestamp"],
),
],
)
async def test_legacy_deprecation_with_unique_objects(
hass: HomeAssistant,
domain: str,
config: dict,
strings_to_check: list[str],
issue_registry: ir.IssueRegistry,
) -> None:
"""Test legacy configuration raises issue and unique objects are properly converted to valid configurations."""

await async_setup_component(hass, domain, config)
await hass.async_block_till_done()

assert len(issue_registry.issues) == 1
issue = next(iter(issue_registry.issues.values()))

assert issue.domain == "template"
assert issue.severity == ir.IssueSeverity.WARNING
assert issue.translation_placeholders is not None
for string in strings_to_check:
assert string in issue.translation_placeholders["config"]


@pytest.mark.parametrize(
("domain", "config"),
[
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("expected_min_mireds", "attribute_template"),
|
||||
("expected_min_mireds", "expected_max_kelvin", "attribute_template"),
|
||||
[
|
||||
(118, "{{118}}"),
|
||||
(153, "{{x - 12}}"),
|
||||
(153, "None"),
|
||||
(153, "{{ none }}"),
|
||||
(153, ""),
|
||||
(153, "{{ 'a' }}"),
|
||||
(118, 8474, "{{118}}"),
|
||||
(153, 6535, "{{x - 12}}"),
|
||||
(153, 6535, "None"),
|
||||
(153, 6535, "{{ none }}"),
|
||||
(153, 6535, ""),
|
||||
(153, 6535, "{{ 'a' }}"),
|
||||
],
|
||||
)
|
||||
async def test_min_mireds_template(
|
||||
hass: HomeAssistant,
|
||||
expected_min_mireds,
|
||||
expected_min_mireds: int,
|
||||
expected_max_kelvin: int,
|
||||
style: ConfigurationStyle,
|
||||
setup_light_with_mireds,
|
||||
) -> None:
|
||||
@@ -2369,6 +2370,7 @@ async def test_min_mireds_template(
|
||||
state = hass.states.get("light.test_template_light")
|
||||
assert state is not None
|
||||
assert state.attributes.get("min_mireds") == expected_min_mireds
|
||||
assert state.attributes.get("max_color_temp_kelvin") == expected_max_kelvin
|
||||
|
||||
|
||||
@pytest.mark.parametrize("count", [1])
|
||||
@@ -2381,19 +2383,20 @@ async def test_min_mireds_template(
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("expected_max_mireds", "attribute_template"),
|
||||
("expected_max_mireds", "expected_min_kelvin", "attribute_template"),
|
||||
[
|
||||
(488, "{{488}}"),
|
||||
(500, "{{x - 12}}"),
|
||||
(500, "None"),
|
||||
(500, "{{ none }}"),
|
||||
(500, ""),
|
||||
(500, "{{ 'a' }}"),
|
||||
(488, 2049, "{{488}}"),
|
||||
(500, 2000, "{{x - 12}}"),
|
||||
(500, 2000, "None"),
|
||||
(500, 2000, "{{ none }}"),
|
||||
(500, 2000, ""),
|
||||
(500, 2000, "{{ 'a' }}"),
|
||||
],
|
||||
)
|
||||
async def test_max_mireds_template(
|
||||
hass: HomeAssistant,
|
||||
expected_max_mireds,
|
||||
expected_max_mireds: int,
|
||||
expected_min_kelvin: int,
|
||||
style: ConfigurationStyle,
|
||||
setup_light_with_mireds,
|
||||
) -> None:
|
||||
@@ -2405,6 +2408,7 @@ async def test_max_mireds_template(
|
||||
state = hass.states.get("light.test_template_light")
|
||||
assert state is not None
|
||||
assert state.attributes.get("max_mireds") == expected_max_mireds
|
||||
assert state.attributes.get("min_color_temp_kelvin") == expected_min_kelvin
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -182,7 +182,11 @@ async def _create_device(hass: HomeAssistant, mock_device_code: str) -> Customer
key: DeviceFunction(
code=key,
type=value["type"],
values=json_dumps(value["value"]),
values=(
values
if isinstance(values := value["value"], str)
else json_dumps(values)
),
)
for key, value in details["function"].items()
}
@@ -190,7 +194,11 @@ async def _create_device(hass: HomeAssistant, mock_device_code: str) -> Customer
key: DeviceStatusRange(
code=key,
type=value["type"],
values=json_dumps(value["value"]),
values=(
values
if isinstance(values := value["value"], str)
else json_dumps(values)
),
)
for key, value in details["status_range"].items()
}
53
tests/components/tuya/fixtures/wk_B0eP8qYAdpUo4yR9.json
Normal file
@@ -0,0 +1,53 @@
{
"endpoint": "https://apigw.tuyaeu.com",
"mqtt_connected": true,
"disabled_by": null,
"disabled_polling": false,
"name": "ITC-308-WIFI Thermostat",
"category": "wk",
"product_id": "B0eP8qYAdpUo4yR9",
"product_name": "ITC-308-WIFI Thermostat",
"online": true,
"sub": false,
"time_zone": "+01:00",
"active_time": "2022-02-08T10:49:39+00:00",
"create_time": "2022-02-08T10:49:39+00:00",
"update_time": "2022-02-08T10:49:39+00:00",
"function": {
"temp_unit_convert": {
"type": "Enum",
"value": "{\"range\":[\"c\",\"f\"]}"
},
"temp_set": {
"type": "Integer",
"value": "{\"unit\":\"\",\"min\":-400,\"max\":2120,\"scale\":1,\"step\":5}"
}
},
"status_range": {
"temp_unit_convert": {
"type": "Enum",
"value": "{\"range\":[\"c\",\"f\"]}"
},
"temp_current": {
"type": "Integer",
"value": "{\"unit\":\"\u2103\",\"min\":-500,\"max\":1200,\"scale\":1,\"step\":10}"
},
"temp_set": {
"type": "Integer",
"value": "{\"unit\":\"\",\"min\":-400,\"max\":2120,\"scale\":1,\"step\":5}"
},
"temp_current_f": {
"type": "Integer",
"value": "{\"unit\":\"\u2109\",\"min\":-500,\"max\":2480,\"scale\":1,\"step\":10}"
}
},
"status": {
"temp_unit_convert": "c",
"temp_current": 340,
"temp_set": 350,
"temp_current_f": 932
},
"set_up": true,
"support_local": true,
"warnings": null
}
@@ -654,6 +654,68 @@
'state': 'off',
})
# ---
# name: test_platform_setup_and_discovery[climate.itc_308_wifi_thermostat-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'hvac_modes': list([
]),
'max_temp': 212.0,
'min_temp': -40.0,
'target_temp_step': 0.5,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'climate',
'entity_category': None,
'entity_id': 'climate.itc_308_wifi_thermostat',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': None,
'platform': 'tuya',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': <ClimateEntityFeature: 1>,
'translation_key': None,
'unique_id': 'tuya.9Ry4oUpdAYq8Pe0Bkw',
'unit_of_measurement': None,
})
# ---
# name: test_platform_setup_and_discovery[climate.itc_308_wifi_thermostat-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'current_temperature': 34.0,
'friendly_name': 'ITC-308-WIFI Thermostat',
'hvac_modes': list([
]),
'max_temp': 212.0,
'min_temp': -40.0,
'supported_features': <ClimateEntityFeature: 1>,
'target_temp_step': 0.5,
'temperature': 35.0,
}),
'context': <ANY>,
'entity_id': 'climate.itc_308_wifi_thermostat',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_platform_setup_and_discovery[climate.kabinet-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -1450,6 +1512,15 @@
'target_temp_step': 1.0,
})
# ---
# name: test_us_customary_system[climate.itc_308_wifi_thermostat]
ReadOnlyDict({
'current_temperature': 93,
'max_temp': 414,
'min_temp': -40,
'target_temp_step': 0.5,
'temperature': 95,
})
# ---
# name: test_us_customary_system[climate.kabinet]
ReadOnlyDict({
'current_temperature': 67,
@@ -1301,6 +1301,37 @@
'via_device_id': None,
})
# ---
# name: test_device_registry[9Ry4oUpdAYq8Pe0Bkw]
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': None,
'connections': set({
}),
'disabled_by': None,
'entry_type': None,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'tuya',
'9Ry4oUpdAYq8Pe0Bkw',
),
}),
'labels': set({
}),
'manufacturer': 'Tuya',
'model': 'ITC-308-WIFI Thermostat',
'model_id': 'B0eP8qYAdpUo4yR9',
'name': 'ITC-308-WIFI Thermostat',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': None,
})
# ---
# name: test_device_registry[9c1vlsxoscm]
DeviceRegistryEntrySnapshot({
'area_id': None,
@@ -577,7 +577,7 @@
'media_class': <MediaClass.GAME: 'game'>,
'media_content_id': 'media-source://xbox/271958441785640/1297287135',
'media_content_type': <MediaClass.GAME: 'game'>,
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.64736.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6491fb2f-52e7-4129-bcbd-d23a67117ae0',
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.64736.65457035095819016.56f55216-1bb9-40aa-8796-068cf3075fc1.6491fb2f-52e7-4129-bcbd-d23a67117ae0',
'title': 'Blue Dragon',
}),
dict({
@@ -588,7 +588,7 @@
'media_class': <MediaClass.GAME: 'game'>,
'media_content_id': 'media-source://xbox/271958441785640/1560034050',
'media_content_type': <MediaClass.GAME: 'game'>,
'thumbnail': 'http://store-images.s-microsoft.com/image/apps.46246.63309362003335928.4079e21b-b00f-4446-a680-6bf9c0eb0158.c976135a-831a-4cf6-a39b-f01c633567bc',
'thumbnail': 'https://store-images.s-microsoft.com/image/apps.46246.63309362003335928.4079e21b-b00f-4446-a680-6bf9c0eb0158.c976135a-831a-4cf6-a39b-f01c633567bc',
'title': "Assassin's Creed® Syndicate",
}),
]),

@@ -790,7 +790,7 @@
'attributes': ReadOnlyDict({
'achievements': '2 / 43',
'developer': 'Mistwalker / Artoon',
'entity_picture': 'http://store-images.s-microsoft.com/image/apps.35072.13670972585585116.70570f0d-17aa-4f97-b692-5412fa183673.25a97451-9369-4f6b-b66b-3427913235eb',
'entity_picture': 'https://store-images.s-microsoft.com/image/apps.35072.13670972585585116.70570f0d-17aa-4f97-b692-5412fa183673.25a97451-9369-4f6b-b66b-3427913235eb',
'friendly_name': 'GSR Ae Now playing',
'gamerscore': '10 / 1000',
'genres': 'Role Playing',
@@ -1813,6 +1813,36 @@ async def test_formation_strategy_form_initial_network(
assert result2["type"] is FlowResultType.CREATE_ENTRY


async def test_formation_strategy_form_initial_network_failure(
advanced_pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant
) -> None:
"""Test forming a new network that fails with an exception."""
# Mock form_network to raise an exception
mock_app.form_network.side_effect = DelayedAsyncMock(
side_effect=Exception("Network formation failed")
)

result = await advanced_pick_radio(RadioType.ezsp)
result_form = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={"next_step_id": config_flow.FORMATION_FORM_NEW_NETWORK},
)

result2 = await consume_progress_flow(
hass,
flow_id=result_form["flow_id"],
valid_step_ids=("form_new_network",),
)
await hass.async_block_till_done()

assert result2["type"] is FlowResultType.ABORT
assert result2["reason"] == "cannot_form_network"
assert "Network formation failed" in result2["description_placeholders"]["error"]

# Verify form_network was called
mock_app.form_network.assert_called_once()


@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True))
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_onboarding_auto_formation_new_hardware(