Mirror of https://github.com/home-assistant/core.git (synced 2025-12-18 22:08:14 +00:00)

Compare commits: input_bool...2025.12.3 (91 commits)
| SHA1 |
|---|
| 04746b6843 |
| 0547153730 |
| eb024b4dde |
| 1d4817608e |
| a37ca293e1 |
| f3dbddee16 |
| b26681ee88 |
| effe72bfda |
| 076835ca1c |
| 4b9b1e611a |
| 0b4ea42810 |
| 8907608345 |
| 356ee07e22 |
| bee3ee6320 |
| fb72ff9bd0 |
| 412e05d8da |
| 58ee8e863e |
| e3a47bfc51 |
| a6cdacc8fe |
| dd0425ab8e |
| 1d289c0083 |
| 70786a1d90 |
| 293eb69788 |
| 71d92291d1 |
| 726de64394 |
| de04f22f89 |
| 9e8cc3a65b |
| 27fa92b607 |
| ce5c5c5eb7 |
| 88e29df8eb |
| a2b5744696 |
| 201c3785f5 |
| 24de26cbf5 |
| ac0a544829 |
| 1a11b92f05 |
| ab0811f59f |
| 68711b2f21 |
| 886e2b0af1 |
| 7492b5be75 |
| e4f1565e3c |
| 7f37412199 |
| eaef0160a2 |
| f049c425ba |
| 50eee75b8f |
| 81e47f6844 |
| ffebbab020 |
| 9824bdc1c9 |
| a933d4a0eb |
| f7f7f9a2de |
| aac412f3a8 |
| 660a14e78d |
| 4aa3f0a400 |
| 0b52c806d4 |
| bbe27d86a1 |
| fb7941df1d |
| c46e341941 |
| 2e3a9e3a90 |
| 55c5ecd28a |
| e50e2487e1 |
| 74e118f85c |
| 39a62ec2f6 |
| 1310efcb07 |
| 53af592c2c |
| 023987b805 |
| 5b8fb607b4 |
| 252f6716ff |
| bf78e28f83 |
| 22706d02a7 |
| 5cff0e946a |
| 6cbe2ed279 |
| fb0f5f52b2 |
| 5c422bb770 |
| fd1bc07b8c |
| 97a019d313 |
| 8ae8a564c2 |
| 2f72f57bb7 |
| e928e3cb54 |
| b0e2109e15 |
| b449c6673f |
| 877ad38ac3 |
| 229f45feae |
| a535d1f4eb |
| d4adc00ae6 |
| ba141f9d1d |
| 72be9793a4 |
| 5ae7cc5f84 |
| d01a469b46 |
| 9f07052874 |
| b9bc9d3fc2 |
| 1e180cd5ee |
| dc9cdd13b1 |
CODEOWNERS (generated, 4 changes)
@@ -1354,8 +1354,8 @@ build.json @home-assistant/supervisor
/tests/components/ring/ @sdb9696
/homeassistant/components/risco/ @OnFreund
/tests/components/risco/ @OnFreund
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L @allenporter
@@ -30,6 +30,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
)

@@ -68,34 +69,19 @@ class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
account_number=user_input[CONF_ACCOUNT_NUMBER],
)
)
if isinstance(validation_response, BaseAuth):
account_number = (
user_input.get(CONF_ACCOUNT_NUMBER)
or validation_response.account_number
)
await self.async_set_unique_id(account_number)
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=account_number,
title=user_input[CONF_ACCOUNT_NUMBER],
data={
**user_input,
CONF_ACCESS_TOKEN: validation_response.refresh_token,
CONF_ACCOUNT_NUMBER: account_number,
},
)
if validation_response == "smart_meter_unavailable":
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA.extend(
{
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
),
errors={"base": validation_response},
)
errors["base"] = validation_response

return self.async_show_form(
@@ -5,6 +5,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
"iot_class": "cloud_polling",
"loggers": ["pyanglianwater"],
"quality_scale": "bronze",
"requirements": ["pyanglianwater==2.1.0"]
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
"requirements": ["aioasuswrt==1.5.2", "asusrouter==1.21.3"]
}
@@ -64,6 +64,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> b
if entry.version == 2:
await _reauth_flow_wrapper(hass, entry, data)
return False
if entry.version == 3:
# Migrate device_id to hardware_id for blinkpy 0.25.x OAuth2 compatibility
if "device_id" in data:
data["hardware_id"] = data.pop("device_id")
hass.config_entries.async_update_entry(entry, data=data, version=4)
return True
return True
@@ -21,7 +21,7 @@ from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DEVICE_ID, DOMAIN
from .const import DOMAIN, HARDWARE_ID

_LOGGER = logging.getLogger(__name__)

@@ -43,7 +43,7 @@ async def _send_blink_2fa_pin(blink: Blink, pin: str | None) -> bool:
class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a Blink config flow."""

VERSION = 3
VERSION = 4

def __init__(self) -> None:
"""Initialize the blink flow."""

@@ -53,7 +53,7 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
async def _handle_user_input(self, user_input: dict[str, Any]):
"""Handle user input."""
self.auth = Auth(
{**user_input, "device_id": DEVICE_ID},
{**user_input, "hardware_id": HARDWARE_ID},
no_prompt=True,
session=async_get_clientsession(self.hass),
)
@@ -3,7 +3,7 @@
from homeassistant.const import Platform

DOMAIN = "blink"
DEVICE_ID = "Home Assistant"
HARDWARE_ID = "Home Assistant"

CONF_MIGRATE = "migrate"
CONF_CAMERA = "camera"

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/blink",
"iot_class": "cloud_polling",
"loggers": ["blinkpy"],
"requirements": ["blinkpy==0.24.1"]
"requirements": ["blinkpy==0.25.1"]
}
@@ -102,6 +102,12 @@ class ConfiguredDoorBird:
"""Get token for device."""
return self._token

def _get_hass_url(self) -> str:
"""Get the Home Assistant URL for this device."""
if custom_url := self.custom_url:
return custom_url
return get_url(self._hass, prefer_external=False)

async def async_register_events(self) -> None:
"""Register events on device."""
if not self.door_station_events:

@@ -146,13 +152,7 @@ class ConfiguredDoorBird:

async def _async_register_events(self) -> dict[str, Any]:
"""Register events on device."""
# Override url if another is specified in the configuration
if custom_url := self.custom_url:
hass_url = custom_url
else:
# Get the URL of this server
hass_url = get_url(self._hass, prefer_external=False)

hass_url = self._get_hass_url()
http_fav = await self._async_get_http_favorites()
if any(
# Note that a list comp is used here to ensure all

@@ -191,10 +191,14 @@ class ConfiguredDoorBird:
self._get_event_name(event): event_type
for event, event_type in DEFAULT_EVENT_TYPES
}
hass_url = self._get_hass_url()
for identifier, data in http_fav.items():
title: str | None = data.get("title")
if not title or not title.startswith("Home Assistant"):
continue
value: str | None = data.get("value")
if not value or not value.startswith(hass_url):
continue # Not our favorite - different HA instance or stale
event = title.partition("(")[2].strip(")")
if input_type := favorite_input_type.get(identifier):
events.append(DoorbirdEvent(event, input_type))
@@ -6,5 +6,5 @@
"iot_class": "cloud_polling",
"loggers": ["evohome", "evohomeasync", "evohomeasync2"],
"quality_scale": "legacy",
"requirements": ["evohome-async==1.0.5"]
"requirements": ["evohome-async==1.0.6"]
}

@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251203.0"]
"requirements": ["home-assistant-frontend==20251203.2"]
}
@@ -1,9 +1,9 @@
{
"preview_features": {
"winter_mode": {
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️",
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in labs settings.",
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in labs settings.",
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️\n\nIf you have animations disabled in your device accessibility settings, this feature will not work.",
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in Labs settings.",
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in Labs settings.",
"name": "Winter mode"
}
},

@@ -8,5 +8,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.1.0"]
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.1"]
}
@@ -51,9 +51,9 @@ async def _validate_input(
description_placeholders: dict[str, str],
) -> bool:
try:
await api.async_air_quality(
await api.async_get_current_conditions(
lat=user_input[CONF_LOCATION][CONF_LATITUDE],
long=user_input[CONF_LOCATION][CONF_LONGITUDE],
lon=user_input[CONF_LOCATION][CONF_LONGITUDE],
)
except GoogleAirQualityApiError as err:
errors["base"] = "cannot_connect"

@@ -7,7 +7,7 @@ from typing import Final

from google_air_quality_api.api import GoogleAirQualityApi
from google_air_quality_api.exceptions import GoogleAirQualityApiError
from google_air_quality_api.model import AirQualityData
from google_air_quality_api.model import AirQualityCurrentConditionsData

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE

@@ -23,7 +23,9 @@ UPDATE_INTERVAL: Final = timedelta(hours=1)
type GoogleAirQualityConfigEntry = ConfigEntry[GoogleAirQualityRuntimeData]

class GoogleAirQualityUpdateCoordinator(DataUpdateCoordinator[AirQualityData]):
class GoogleAirQualityUpdateCoordinator(
DataUpdateCoordinator[AirQualityCurrentConditionsData]
):
"""Coordinator for fetching Google AirQuality data."""

config_entry: GoogleAirQualityConfigEntry

@@ -48,10 +50,10 @@ class GoogleAirQualityUpdateCoordinator(DataUpdateCoordinator[AirQualityData]):
self.lat = subentry.data[CONF_LATITUDE]
self.long = subentry.data[CONF_LONGITUDE]

async def _async_update_data(self) -> AirQualityData:
async def _async_update_data(self) -> AirQualityCurrentConditionsData:
"""Fetch air quality data for this coordinate."""
try:
return await self.client.async_air_quality(self.lat, self.long)
return await self.client.async_get_current_conditions(self.lat, self.long)
except GoogleAirQualityApiError as ex:
_LOGGER.debug("Cannot fetch air quality data: %s", str(ex))
raise UpdateFailed(

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["google_air_quality_api"],
"quality_scale": "bronze",
"requirements": ["google_air_quality_api==1.1.3"]
"requirements": ["google_air_quality_api==2.0.2"]
}
@@ -4,7 +4,7 @@ from collections.abc import Callable
from dataclasses import dataclass
import logging

from google_air_quality_api.model import AirQualityData
from google_air_quality_api.model import AirQualityCurrentConditionsData

from homeassistant.components.sensor import (
SensorDeviceClass,

@@ -33,15 +33,17 @@ PARALLEL_UPDATES = 0
class AirQualitySensorEntityDescription(SensorEntityDescription):
"""Describes Air Quality sensor entity."""

exists_fn: Callable[[AirQualityData], bool] = lambda _: True
options_fn: Callable[[AirQualityData], list[str] | None] = lambda _: None
value_fn: Callable[[AirQualityData], StateType]
native_unit_of_measurement_fn: Callable[[AirQualityData], str | None] = (
exists_fn: Callable[[AirQualityCurrentConditionsData], bool] = lambda _: True
options_fn: Callable[[AirQualityCurrentConditionsData], list[str] | None] = (
lambda _: None
)
translation_placeholders_fn: Callable[[AirQualityData], dict[str, str]] | None = (
None
)
value_fn: Callable[[AirQualityCurrentConditionsData], StateType]
native_unit_of_measurement_fn: Callable[
[AirQualityCurrentConditionsData], str | None
] = lambda _: None
translation_placeholders_fn: (
Callable[[AirQualityCurrentConditionsData], dict[str, str]] | None
) = None

AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
@@ -88,16 +88,16 @@
"1b_good_air_quality": "1B - Good air quality",
"2_cyan": "2 - Cyan",
"2_light_green": "2 - Light green",
"2_orange": "4 - Orange",
"2_red": "5 - Red",
"2_yellow": "3 - Yellow",
"2a_acceptable_air_quality": "2A - Acceptable air quality",
"2b_acceptable_air_quality": "2B - Acceptable air quality",
"3_green": "3 - Green",
"3_yellow": "3 - Yellow",
"3a_aggravated_air_quality": "3A - Aggravated air quality",
"3b_bad_air_quality": "3B - Bad air quality",
"4_orange": "4 - Orange",
"4_yellow_watch": "4 - Yellow/Watch",
"5_orange_alert": "5 - Orange/Alert",
"5_red": "5 - Red",
"6_red_alert": "6 - Red/Alert+",
"10_33": "10-33% of guideline",
"33_66": "33-66% of guideline",
@@ -149,6 +149,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
}
),
supports_response=SupportsResponse.ONLY,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)
return True

@@ -23,7 +23,7 @@ CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
RECOMMENDED_STT_MODEL = RECOMMENDED_CHAT_MODEL
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image-preview"
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"

@@ -162,7 +162,7 @@
"fields": {
"filenames": {
"description": "Attachments to add to the prompt (images, PDFs, etc)",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Attachment filenames"
},
"prompt": {

@@ -159,4 +159,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
_async_handle_upload,
schema=UPLOAD_SERVICE_SCHEMA,
supports_response=SupportsResponse.OPTIONAL,
description_placeholders={"example_image_path": "/config/www/image.jpg"},
)

@@ -92,7 +92,7 @@
},
"filename": {
"description": "Path to the image or video to upload.",
"example": "/config/www/image.jpg",
"example": "{example_image_path}",
"name": "Filename"
}
},
@@ -27,6 +27,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eBatChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_charge_lifetime",
|
||||
@@ -42,6 +43,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eBatDisChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_discharge_lifetime",
|
||||
@@ -57,6 +59,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="epvToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_solar_generation_lifetime",
|
||||
@@ -72,6 +75,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pDischarge1",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_voltage",
|
||||
@@ -101,6 +105,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="elocalLoadToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_lifetime",
|
||||
@@ -116,6 +121,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="etoGridToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_export_to_grid_lifetime",
|
||||
@@ -132,6 +138,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="chargePower",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption",
|
||||
@@ -139,6 +146,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pLocalLoad",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_1",
|
||||
@@ -146,6 +154,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pPv1",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_2",
|
||||
@@ -153,6 +162,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pPv2",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_all",
|
||||
@@ -160,6 +170,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_export_to_grid",
|
||||
@@ -167,6 +178,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pactogrid",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_import_from_grid",
|
||||
@@ -174,6 +186,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pactouser",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_discharge_kw",
|
||||
@@ -181,6 +194,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pdisCharge1",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_grid_voltage",
|
||||
@@ -196,6 +210,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eCharge",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_solar_today",
|
||||
@@ -203,6 +218,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_self_consumption_today",
|
||||
@@ -210,6 +226,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eChargeToday1",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_battery_today",
|
||||
@@ -217,6 +234,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="echarge1",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_import_from_grid_today",
|
||||
@@ -224,6 +242,7 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="etouser",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
# This sensor is manually created using the most recent X-Axis value from the chartData
|
||||
GrowattSensorEntityDescription(
|
||||
|
||||
@@ -79,6 +79,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv1",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -122,6 +123,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv2",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -165,6 +167,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv3",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -208,6 +211,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv4",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -234,6 +238,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -258,6 +263,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pac",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -323,6 +329,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc1DischargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_1_discharge_total",
|
||||
@@ -339,6 +346,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc2DischargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_2_discharge_total",
|
||||
@@ -372,6 +380,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc1ChargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_1_charge_total",
|
||||
@@ -388,6 +397,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc2ChargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_2_charge_total",
|
||||
@@ -445,6 +455,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToLocalLoad",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -453,6 +464,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToUserTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -461,6 +473,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToGridTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -545,6 +558,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="psystem",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -553,6 +567,7 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pself",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -50,5 +50,6 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="nominalPower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/hanna",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["hanna-cloud==0.0.6"]
|
||||
"requirements": ["hanna-cloud==0.0.7"]
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import logging
|
||||
|
||||
from HueBLE import HueBleLight
|
||||
from HueBLE import ConnectionError, HueBleError, HueBleLight
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
async_ble_device_from_address,
|
||||
@@ -38,8 +38,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueBLEConfigEntry) -> bo
|
||||
|
||||
light = HueBleLight(ble_device)
|
||||
|
||||
if not await light.connect() or not await light.poll_state():
|
||||
raise ConfigEntryNotReady("Device found but unable to connect.")
|
||||
try:
|
||||
await light.connect()
|
||||
await light.poll_state()
|
||||
except ConnectionError as e:
|
||||
raise ConfigEntryNotReady("Device found but unable to connect.") from e
|
||||
except HueBleError as e:
|
||||
raise ConfigEntryNotReady(
|
||||
"Device found and connected but unable to poll values from it."
|
||||
) from e
|
||||
|
||||
entry.runtime_data = light
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ from enum import Enum
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from HueBLE import HueBleLight
|
||||
from HueBLE import ConnectionError, HueBleError, HueBleLight, PairingError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
@@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import DOMAIN, URL_PAIRING_MODE
|
||||
from .const import DOMAIN, URL_FACTORY_RESET, URL_PAIRING_MODE
|
||||
from .light import get_available_color_modes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -41,32 +41,22 @@ async def validate_input(hass: HomeAssistant, address: str) -> Error | None:
|
||||
|
||||
try:
|
||||
light = HueBleLight(ble_device)
|
||||
|
||||
await light.connect()
|
||||
get_available_color_modes(light)
|
||||
await light.poll_state()
|
||||
|
||||
if light.authenticated is None:
|
||||
_LOGGER.warning(
|
||||
"Unable to determine if light authenticated, proceeding anyway"
|
||||
)
|
||||
elif not light.authenticated:
|
||||
return Error.INVALID_AUTH
|
||||
|
||||
if not light.connected:
|
||||
return Error.CANNOT_CONNECT
|
||||
|
||||
try:
|
||||
get_available_color_modes(light)
|
||||
except HomeAssistantError:
|
||||
return Error.NOT_SUPPORTED
|
||||
|
||||
_, errors = await light.poll_state()
|
||||
if len(errors) != 0:
|
||||
_LOGGER.warning("Errors raised when connecting to light: %s", errors)
|
||||
return Error.CANNOT_CONNECT
|
||||
|
||||
except Exception:
|
||||
except ConnectionError as e:
|
||||
_LOGGER.exception("Error connecting to light")
|
||||
return (
|
||||
Error.INVALID_AUTH
|
||||
if type(e.__cause__) is PairingError
|
||||
else Error.CANNOT_CONNECT
|
||||
)
|
||||
except HueBleError:
|
||||
_LOGGER.exception("Unexpected error validating light connection")
|
||||
return Error.UNKNOWN
|
||||
except HomeAssistantError:
|
||||
return Error.NOT_SUPPORTED
|
||||
else:
|
||||
return None
|
||||
finally:
|
||||
@@ -129,6 +119,7 @@ class HueBleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_NAME: self._discovery_info.name,
|
||||
CONF_MAC: self._discovery_info.address,
|
||||
"url_pairing_mode": URL_PAIRING_MODE,
|
||||
"url_factory_reset": URL_FACTORY_RESET,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -2,3 +2,4 @@
|
||||
|
||||
DOMAIN = "hue_ble"
|
||||
URL_PAIRING_MODE = "https://www.home-assistant.io/integrations/hue_ble#initial-setup"
|
||||
URL_FACTORY_RESET = "https://www.philips-hue.com/en-gb/support/article/how-to-factory-reset-philips-hue-lights/000004"
|
||||
|
||||
@@ -113,7 +113,7 @@ class HueBLELight(LightEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Fetch latest state from light and make available via properties."""
|
||||
await self._api.poll_state(run_callbacks=True)
|
||||
await self._api.poll_state()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Set properties then turn the light on."""
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["bleak", "HueBLE"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["HueBLE==1.0.8"]
|
||||
"requirements": ["HueBLE==2.1.0"]
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Do you want to set up {name} ({mac})?. Make sure the light is [made discoverable to voice assistants]({url_pairing_mode})."
|
||||
"description": "Do you want to set up {name} ({mac})?. Make sure the light is [made discoverable to voice assistants]({url_pairing_mode}) or has been [factory reset]({url_factory_reset})."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,6 +39,10 @@ if TYPE_CHECKING:
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_DESCRIPTION_PLACEHOLDERS = {
|
||||
"sensor_value_types_url": "https://www.home-assistant.io/integrations/knx/#value-types"
|
||||
}
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -48,6 +52,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_SEND,
|
||||
service_send_to_knx_bus,
|
||||
schema=SERVICE_KNX_SEND_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
@@ -63,6 +68,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_EVENT_REGISTER,
|
||||
service_event_register_modify,
|
||||
schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
async_register_admin_service(
|
||||
@@ -71,6 +77,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_EXPOSURE_REGISTER,
|
||||
service_exposure_register_modify,
|
||||
schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
async_register_admin_service(
|
||||
|
||||
@@ -674,7 +674,7 @@
|
||||
"name": "Remove event registration"
|
||||
},
|
||||
"type": {
|
||||
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
@@ -704,7 +704,7 @@
|
||||
"name": "Remove exposure"
|
||||
},
|
||||
"type": {
|
||||
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
@@ -740,7 +740,7 @@
|
||||
"name": "Send as Response"
|
||||
},
|
||||
"type": {
|
||||
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -7,11 +7,10 @@ in the Home Assistant Labs UI for users to enable or disable.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -19,6 +18,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .models import (
|
||||
EventLabsUpdatedData,
|
||||
LabPreviewFeature,
|
||||
@@ -135,55 +135,3 @@ async def _async_scan_all_preview_features(
|
||||
|
||||
_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
|
||||
return preview_features
|
||||
|
||||
|
||||
@callback
|
||||
def async_is_preview_feature_enabled(
|
||||
hass: HomeAssistant, domain: str, preview_feature: str
|
||||
) -> bool:
|
||||
"""Check if a lab preview feature is enabled.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
|
||||
Returns:
|
||||
True if the preview feature is enabled, False otherwise
|
||||
"""
|
||||
if LABS_DATA not in hass.data:
|
||||
return False
|
||||
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Callback to invoke when the preview feature is toggled
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
|
||||
63
homeassistant/components/labs/helpers.py
Normal file
63
homeassistant/components/labs/helpers.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Helper functions for the Home Assistant Labs integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@callback
|
||||
def async_is_preview_feature_enabled(
|
||||
hass: HomeAssistant, domain: str, preview_feature: str
|
||||
) -> bool:
|
||||
"""Check if a lab preview feature is enabled.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
|
||||
Returns:
|
||||
True if the preview feature is enabled, False otherwise
|
||||
"""
|
||||
if LABS_DATA not in hass.data:
|
||||
return False
|
||||
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Callback to invoke when the preview feature is toggled
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
@@ -12,6 +12,7 @@ from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@@ -20,6 +21,7 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the number websocket API."""
|
||||
websocket_api.async_register_command(hass, websocket_list_preview_features)
|
||||
websocket_api.async_register_command(hass, websocket_update_preview_feature)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_feature)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -108,3 +110,52 @@ async def websocket_update_preview_feature(
|
||||
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "labs/subscribe",
|
||||
vol.Required("domain"): str,
|
||||
vol.Required("preview_feature"): str,
|
||||
}
|
||||
)
|
||||
def websocket_subscribe_feature(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a specific lab preview feature updates."""
|
||||
domain = msg["domain"]
|
||||
preview_feature_key = msg["preview_feature"]
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
|
||||
preview_feature_id = f"{domain}.{preview_feature_key}"
|
||||
|
||||
if preview_feature_id not in labs_data.preview_features:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
websocket_api.ERR_NOT_FOUND,
|
||||
f"Preview feature {preview_feature_id} not found",
|
||||
)
|
||||
return
|
||||
|
||||
preview_feature = labs_data.preview_features[preview_feature_id]
|
||||
|
||||
@callback
|
||||
def send_event() -> None:
|
||||
"""Send feature state to client."""
|
||||
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg["id"],
|
||||
preview_feature.to_dict(enabled=enabled),
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_listen(
|
||||
hass, domain, preview_feature_key, send_event
|
||||
)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
send_event()
|
||||
|
||||
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.2.2"]
|
||||
"requirements": ["pylamarzocco==2.2.4"]
|
||||
}
|
||||
|
||||
@@ -108,6 +108,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_MESSAGE,
|
||||
_async_service_message,
|
||||
schema=SERVICE_MESSAGE_SCHEMA,
|
||||
description_placeholders={"icons_url": "https://developer.lametric.com/icons"},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -211,7 +211,7 @@
|
||||
"name": "[%key:common::config_flow::data::device%]"
|
||||
},
|
||||
"icon": {
|
||||
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: https://developer.lametric.com/icons.",
|
||||
"description": "The ID number of the icon or animation to display. List of all icons and their IDs can be found at: {icons_url}.",
|
||||
"name": "Icon ID"
|
||||
},
|
||||
"icon_type": {
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pypck"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pypck==0.9.5", "lcn-frontend==0.2.7"]
|
||||
"requirements": ["pypck==0.9.7", "lcn-frontend==0.2.7"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["ical"],
|
||||
"requirements": ["ical==11.1.0"]
|
||||
"requirements": ["ical==12.1.1"]
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_todo",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["ical==11.1.0"]
|
||||
"requirements": ["ical==12.1.1"]
|
||||
}
|
||||
|
||||
@@ -98,7 +98,11 @@ class LutronCasetaSmartAwaySwitch(LutronCasetaEntity, SwitchEntity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
await super().async_added_to_hass()
|
||||
self._smartbridge.add_smart_away_subscriber(self._handle_bridge_update)
|
||||
self._smartbridge.add_smart_away_subscriber(self._handle_smart_away_update)
|
||||
|
||||
def _handle_smart_away_update(self, smart_away_state: str | None = None) -> None:
|
||||
"""Handle updated smart away state from the bridge."""
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn Smart Away on."""
|
||||
|
||||
@@ -183,6 +183,48 @@ class MatterModeSelectEntity(MatterAttributeSelectEntity):
|
||||
self._attr_name = desc
|
||||
|
||||
|
||||
class MatterDoorLockOperatingModeSelectEntity(MatterAttributeSelectEntity):
|
||||
"""Representation of a Door Lock Operating Mode select entity.
|
||||
|
||||
This entity dynamically filters available operating modes based on the device's
|
||||
`SupportedOperatingModes` bitmap attribute. In this bitmap, bit=0 indicates a
|
||||
supported mode and bit=1 indicates unsupported (inverted from typical bitmap conventions).
|
||||
If the bitmap is unavailable, only mandatory modes are included. The mapping from
|
||||
bitmap bits to operating mode values is defined by the Matter specification.
|
||||
"""
|
||||
|
||||
entity_description: MatterMapSelectEntityDescription
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
# Get the bitmap of supported operating modes
|
||||
supported_modes_bitmap = self.get_matter_attribute_value(
|
||||
self.entity_description.list_attribute
|
||||
)
|
||||
|
||||
# Convert bitmap to list of supported mode values
|
||||
# NOTE: The Matter spec inverts the usual meaning: bit=0 means supported,
|
||||
# bit=1 means not supported, undefined bits must be 1. Mandatory modes are
|
||||
# bits 0 (Normal) and 3 (NoRemoteLockUnlock).
|
||||
num_mode_bits = supported_modes_bitmap.bit_length()
|
||||
supported_mode_values = [
|
||||
bit_position
|
||||
for bit_position in range(num_mode_bits)
|
||||
if not supported_modes_bitmap & (1 << bit_position)
|
||||
]
|
||||
|
||||
# Map supported mode values to their string representations
|
||||
self._attr_options = [
|
||||
mapped_value
|
||||
for mode_value in supported_mode_values
|
||||
if (mapped_value := self.entity_description.device_to_ha(mode_value))
|
||||
]
|
||||
|
||||
# Use base implementation to set the current option
|
||||
super()._update_from_device()
|
||||
|
||||
|
||||
class MatterListSelectEntity(MatterEntity, SelectEntity):
|
||||
"""Representation of a select entity from Matter list and selected item Cluster attribute(s)."""
|
||||
|
||||
@@ -594,15 +636,18 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SELECT,
|
||||
entity_description=MatterSelectEntityDescription(
|
||||
entity_description=MatterMapSelectEntityDescription(
|
||||
key="DoorLockOperatingMode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="door_lock_operating_mode",
|
||||
options=list(DOOR_LOCK_OPERATING_MODE_MAP.values()),
|
||||
list_attribute=clusters.DoorLock.Attributes.SupportedOperatingModes,
|
||||
device_to_ha=DOOR_LOCK_OPERATING_MODE_MAP.get,
|
||||
ha_to_device=DOOR_LOCK_OPERATING_MODE_MAP_REVERSE.get,
|
||||
),
|
||||
entity_class=MatterAttributeSelectEntity,
|
||||
required_attributes=(clusters.DoorLock.Attributes.OperatingMode,),
|
||||
entity_class=MatterDoorLockOperatingModeSelectEntity,
|
||||
required_attributes=(
|
||||
clusters.DoorLock.Attributes.OperatingMode,
|
||||
clusters.DoorLock.Attributes.SupportedOperatingModes,
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "calculated",
|
||||
"loggers": ["yt_dlp"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["yt-dlp[default]==2025.11.12"],
|
||||
"requirements": ["yt-dlp[default]==2025.12.08"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -191,6 +191,7 @@ class ProgramPhaseWashingMachine(MieleEnum, missing_to_none=True):
|
||||
drying = 280
|
||||
disinfecting = 285
|
||||
flex_load_active = 11047
|
||||
automatic_start = 11044
|
||||
|
||||
|
||||
class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
|
||||
@@ -451,19 +452,19 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for washing machines."""
|
||||
|
||||
no_program = 0, -1
|
||||
cottons = 1
|
||||
cottons = 1, 10001
|
||||
minimum_iron = 3
|
||||
delicates = 4
|
||||
woollens = 8
|
||||
silks = 9
|
||||
delicates = 4, 10022
|
||||
woollens = 8, 10040
|
||||
silks = 9, 10042
|
||||
starch = 17
|
||||
rinse = 18
|
||||
drain_spin = 21
|
||||
curtains = 22
|
||||
shirts = 23
|
||||
rinse = 18, 10058
|
||||
drain_spin = 21, 10036
|
||||
curtains = 22, 10055
|
||||
shirts = 23, 10038
|
||||
denim = 24, 123
|
||||
proofing = 27
|
||||
sportswear = 29
|
||||
proofing = 27, 10057
|
||||
sportswear = 29, 10052
|
||||
automatic_plus = 31
|
||||
outerwear = 37
|
||||
pillows = 39
|
||||
@@ -472,19 +473,29 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
|
||||
rinse_out_lint = 48 # washer-dryer
|
||||
dark_garments = 50
|
||||
separate_rinse_starch = 52
|
||||
first_wash = 53
|
||||
first_wash = 53, 10053
|
||||
cottons_hygiene = 69
|
||||
steam_care = 75 # washer-dryer
|
||||
freshen_up = 76 # washer-dryer
|
||||
trainers = 77
|
||||
clean_machine = 91
|
||||
down_duvets = 95
|
||||
express_20 = 122
|
||||
trainers = 77, 10056
|
||||
clean_machine = 91, 10067
|
||||
down_duvets = 95, 10050
|
||||
express_20 = 122, 10029
|
||||
down_filled_items = 129
|
||||
cottons_eco = 133
|
||||
quick_power_wash = 146, 10031
|
||||
eco_40_60 = 190, 10007
|
||||
normal = 10001
|
||||
bed_linen = 10047
|
||||
easy_care = 10016
|
||||
dark_jeans = 10048
|
||||
outdoor_garments = 10049
|
||||
game_pieces = 10070
|
||||
stuffed_toys = 10069
|
||||
pre_ironing = 10059
|
||||
trainers_refresh = 10066
|
||||
smartmatic = 10068
|
||||
cottonrepair = 10065
|
||||
powerfresh = 10075
|
||||
|
||||
|
||||
class DishWasherProgramId(MieleEnum, missing_to_none=True):
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymiele"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pymiele==0.6.0"],
|
||||
"requirements": ["pymiele==0.6.1"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_mieleathome._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -411,6 +411,7 @@
|
||||
"cook_bacon": "Cook bacon",
|
||||
"cool_air": "Cool air",
|
||||
"corn_on_the_cob": "Corn on the cob",
|
||||
"cottonrepair": "CottonRepair",
|
||||
"cottons": "Cottons",
|
||||
"cottons_eco": "Cottons ECO",
|
||||
"cottons_hygiene": "Cottons hygiene",
|
||||
@@ -440,6 +441,7 @@
|
||||
"custom_program_8": "Custom program 8",
|
||||
"custom_program_9": "Custom program 9",
|
||||
"dark_garments": "Dark garments",
|
||||
"dark_jeans": "Dark/jeans",
|
||||
"dark_mixed_grain_bread": "Dark mixed grain bread",
|
||||
"decrystallise_honey": "Decrystallize honey",
|
||||
"defrost": "Defrost",
|
||||
@@ -457,6 +459,7 @@
|
||||
"drop_cookies_2_trays": "Drop cookies (2 trays)",
|
||||
"duck": "Duck",
|
||||
"dutch_hash": "Dutch hash",
|
||||
"easy_care": "Easy care",
|
||||
"eco": "ECO",
|
||||
"eco_40_60": "ECO 40-60",
|
||||
"eco_fan_heat": "ECO fan heat",
|
||||
@@ -487,6 +490,7 @@
|
||||
"fruit_streusel_cake": "Fruit streusel cake",
|
||||
"fruit_tea": "Fruit tea",
|
||||
"full_grill": "Full grill",
|
||||
"game_pieces": "Game pieces",
|
||||
"gentle": "Gentle",
|
||||
"gentle_denim": "Gentle denim",
|
||||
"gentle_minimum_iron": "Gentle minimum iron",
|
||||
@@ -607,6 +611,7 @@
|
||||
"oats_cracked": "Oats (cracked)",
|
||||
"oats_whole": "Oats (whole)",
|
||||
"osso_buco": "Osso buco",
|
||||
"outdoor_garments": "Outdoor garments",
|
||||
"outerwear": "Outerwear",
|
||||
"oyster_mushroom_diced": "Oyster mushroom (diced)",
|
||||
"oyster_mushroom_strips": "Oyster mushroom (strips)",
|
||||
@@ -713,8 +718,10 @@
|
||||
"potatoes_waxy_whole_small": "Potatoes (waxy, whole, small)",
|
||||
"poularde_breast": "Poularde breast",
|
||||
"poularde_whole": "Poularde (whole)",
|
||||
"power_fresh": "PowerFresh",
|
||||
"power_wash": "PowerWash",
|
||||
"prawns": "Prawns",
|
||||
"pre_ironing": "Pre-ironing",
|
||||
"proofing": "Proofing",
|
||||
"prove_15_min": "Prove for 15 min",
|
||||
"prove_30_min": "Prove for 30 min",
|
||||
@@ -807,6 +814,7 @@
|
||||
"simiao_rapid_steam_cooking": "Simiao (rapid steam cooking)",
|
||||
"simiao_steam_cooking": "Simiao (steam cooking)",
|
||||
"small_shrimps": "Small shrimps",
|
||||
"smartmatic": "SmartMatic",
|
||||
"smoothing": "Smoothing",
|
||||
"snow_pea": "Snow pea",
|
||||
"soak": "Soak",
|
||||
@@ -833,6 +841,7 @@
|
||||
"sterilize_crockery": "Sterilize crockery",
|
||||
"stollen": "Stollen",
|
||||
"stuffed_cabbage": "Stuffed cabbage",
|
||||
"stuffed_toys": "Stuffed toys",
|
||||
"sweat_onions": "Sweat onions",
|
||||
"swede_cut_into_batons": "Swede (cut into batons)",
|
||||
"swede_diced": "Swede (diced)",
|
||||
@@ -855,6 +864,7 @@
|
||||
"top_heat": "Top heat",
|
||||
"tortellini_fresh": "Tortellini (fresh)",
|
||||
"trainers": "Trainers",
|
||||
"trainers_refresh": "Trainers refresh",
|
||||
"treacle_sponge_pudding_one_large": "Treacle sponge pudding (one large)",
|
||||
"treacle_sponge_pudding_several_small": "Treacle sponge pudding (several small)",
|
||||
"trout": "Trout",
|
||||
@@ -935,6 +945,7 @@
|
||||
"2nd_grinding": "2nd grinding",
|
||||
"2nd_pre_brewing": "2nd pre-brewing",
|
||||
"anti_crease": "Anti-crease",
|
||||
"automatic_start": "Automatic start",
|
||||
"blocked_brushes": "Brushes blocked",
|
||||
"blocked_drive_wheels": "Drive wheels blocked",
|
||||
"blocked_front_wheel": "Front wheel blocked",
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
"ws_path": "WebSocket path"
|
||||
},
|
||||
"data_description": {
|
||||
"advanced_options": "Enable and select **Next** to set advanced options.",
|
||||
"advanced_options": "Enable and select **Submit** to set advanced options.",
|
||||
"broker": "The hostname or IP address of your MQTT broker.",
|
||||
"certificate": "The custom CA certificate file to validate your MQTT brokers certificate.",
|
||||
"client_cert": "The client certificate to authenticate against your MQTT broker.",
|
||||
|
||||
@@ -163,9 +163,6 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
            LOGGER.exception("Unexpected exception during add-on discovery")
            return self.async_abort(reason="unknown")

        if not server_info.onboard_done:
            return self.async_abort(reason="server_not_ready")

        # We trust the token from hassio discovery and validate it during setup
        self.token = discovery_info.config["auth_token"]

@@ -226,11 +223,6 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
            LOGGER.debug("Ignoring add-on server in zeroconf discovery")
            return self.async_abort(reason="already_discovered_addon")

        # Ignore servers that have not completed onboarding yet
        if not server_info.onboard_done:
            LOGGER.debug("Ignoring server that hasn't completed onboarding")
            return self.async_abort(reason="server_not_ready")

        self.url = server_info.base_url
        self.server_info = server_info
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ SENSORS = [
|
||||
key="current",
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda client: client.power.amps,
|
||||
),
|
||||
OhmeSensorDescription(
|
||||
@@ -57,6 +58,7 @@ SENSORS = [
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
suggested_display_precision=1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda client: client.power.watts,
|
||||
),
|
||||
OhmeSensorDescription(
|
||||
@@ -81,6 +83,7 @@ SENSORS = [
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
suggested_display_precision=0,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda client: client.battery,
|
||||
),
|
||||
OhmeSensorDescription(
|
||||
|
||||
@@ -129,4 +129,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
description_placeholders={"example_image_path": "/config/www/image.jpg"},
|
||||
)
|
||||
|
||||
@@ -156,7 +156,7 @@
|
||||
},
|
||||
"filename": {
|
||||
"description": "Path to the file to upload.",
|
||||
"example": "/config/www/image.jpg",
|
||||
"example": "{example_image_path}",
|
||||
"name": "Filename"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -13,5 +13,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["oralb_ble"],
  "requirements": ["oralb-ble==0.17.6"]
  "requirements": ["oralb-ble==1.0.2"]
}
||||
|
||||
@@ -25,6 +25,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe

_LOGGER = logging.getLogger(__name__)

@@ -101,7 +102,18 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
        except OSError:
            _LOGGER.error("Pilight send failed for %s", str(message_data))

    hass.services.register(DOMAIN, SERVICE_NAME, send_code, schema=RF_CODE_SCHEMA)
    def _register_service() -> None:
        hass.services.async_register(
            DOMAIN,
            SERVICE_NAME,
            send_code,
            schema=RF_CODE_SCHEMA,
            description_placeholders={
                "pilight_protocols_docs_url": "https://manual.pilight.org/protocols/index.html"
            },
        )

    run_callback_threadsafe(hass.loop, _register_service).result()

    # Publish received codes on the HA event bus
    # A whitelist of codes to be published in the event bus
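The registration above runs inside `setup()`, which executes in a worker thread, while `hass.services.async_register` must be called from the event loop; `run_callback_threadsafe(...).result()` bridges the two and blocks until the loop has executed the callback. A rough, self-contained sketch of what such a bridge does (illustration only, not the Home Assistant helper):

```python
import asyncio
import concurrent.futures
from collections.abc import Callable


def run_callback_threadsafe_sketch(
    loop: asyncio.AbstractEventLoop, callback: Callable[[], None]
) -> concurrent.futures.Future:
    """Schedule a sync callback onto a running event loop from another thread."""
    future: concurrent.futures.Future = concurrent.futures.Future()

    def _run() -> None:
        try:
            callback()
            future.set_result(None)
        except Exception as err:  # forward any failure to the waiting thread
            future.set_exception(err)

    loop.call_soon_threadsafe(_run)
    # The calling thread can block on future.result() until the loop has run _run().
    return future
```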
||||
|
||||
@@ -4,7 +4,7 @@
    "description": "Sends RF code to Pilight device.",
    "fields": {
      "protocol": {
        "description": "Protocol that Pilight recognizes. See https://manual.pilight.org/protocols/index.html for supported protocols and additional parameters that each protocol supports.",
        "description": "Protocol that Pilight recognizes. See {pilight_protocols_docs_url} for supported protocols and additional parameters that each protocol supports.",
        "name": "Protocol"
      }
    },
||||
|
||||
@@ -54,8 +54,11 @@ from .const import (
|
||||
)
|
||||
from .coordinator import RainMachineDataUpdateCoordinator
|
||||
|
||||
DEFAULT_SSL = True
|
||||
API_URL_REFERENCE = (
|
||||
"https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post"
|
||||
)
|
||||
|
||||
DEFAULT_SSL = True
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -455,7 +458,15 @@ async def async_setup_entry( # noqa: C901
|
||||
):
|
||||
if hass.services.has_service(DOMAIN, service_name):
|
||||
continue
|
||||
hass.services.async_register(DOMAIN, service_name, method, schema=schema)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
service_name,
|
||||
method,
|
||||
schema=schema,
|
||||
description_placeholders={
|
||||
"api_url": API_URL_REFERENCE,
|
||||
},
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -128,7 +128,7 @@
|
||||
"name": "Push flow meter data"
|
||||
},
|
||||
"push_weather_data": {
|
||||
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integration.\nSee details of RainMachine API here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
|
||||
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integration.\nSee details of RainMachine API here: {api_url}",
|
||||
"fields": {
|
||||
"condition": {
|
||||
"description": "Current weather condition code (WNUM).",
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["ical"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["ical==11.1.0"]
|
||||
"requirements": ["ical==12.1.1"]
|
||||
}
|
||||
|
||||
@@ -422,6 +422,8 @@ class ReolinkHost:
|
||||
"name": self._api.nvr_name,
|
||||
"base_url": self._base_url,
|
||||
"network_link": "https://my.home-assistant.io/redirect/network/",
|
||||
"example_ip": "192.168.1.10",
|
||||
"example_url": "http://192.168.1.10:8123",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -436,6 +438,8 @@ class ReolinkHost:
|
||||
translation_placeholders={
|
||||
"base_url": self._base_url,
|
||||
"network_link": "https://my.home-assistant.io/redirect/network/",
|
||||
"example_ip": "192.168.1.10",
|
||||
"example_url": "http://192.168.1.10:8123",
|
||||
},
|
||||
)
|
||||
else:
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.17.0"]
|
||||
"requirements": ["reolink-aio==0.17.1"]
|
||||
}
|
||||
|
||||
@@ -1004,7 +1004,7 @@
|
||||
"title": "Reolink firmware update required"
|
||||
},
|
||||
"https_webhook": {
|
||||
"description": "Reolink products can not push motion events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `http://192.168.1.10:8123` where `192.168.1.10` is the IP of the Home Assistant device",
|
||||
"description": "Reolink products can not push motion events to an HTTPS address (SSL), please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}). The current (local) address is: `{base_url}`, a valid address could, for example, be `{example_url}` where `{example_ip}` is the IP of the Home Assistant device",
|
||||
"title": "Reolink webhook URL uses HTTPS (SSL)"
|
||||
},
|
||||
"password_too_long": {
|
||||
@@ -1016,7 +1016,7 @@
|
||||
"title": "Reolink incompatible with global SSL certificate"
|
||||
},
|
||||
"webhook_url": {
|
||||
"description": "Did not receive initial ONVIF state from {name}. Most likely, the Reolink camera can not reach the current (local) Home Assistant URL `{base_url}`, please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}) that points to Home Assistant. For example `http://192.168.1.10:8123` where `192.168.1.10` is the IP of the Home Assistant device. Also, make sure the Reolink camera can reach that URL. Using fast motion/AI state polling until the first ONVIF push is received.",
|
||||
"description": "Did not receive initial ONVIF state from {name}. Most likely, the Reolink camera can not reach the current (local) Home Assistant URL `{base_url}`, please configure a (local) HTTP address under \"Home Assistant URL\" in the [network settings]({network_link}) that points to Home Assistant. For example `{example_url}` where `{example_ip}` is the IP of the Home Assistant device. Also, make sure the Reolink camera can reach that URL. Using fast motion/AI state polling until the first ONVIF push is received.",
|
||||
"title": "Reolink webhook URL unreachable"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,20 +1,23 @@
|
||||
"""The Rituals Perfume Genie integration."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from pyrituals import Account, Diffuser
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from pyrituals import Account, AuthenticationException, Diffuser
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import ACCOUNT_HASH, DOMAIN, UPDATE_INTERVAL
|
||||
from .coordinator import RitualsDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.NUMBER,
|
||||
@@ -26,12 +29,38 @@ PLATFORMS = [
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Rituals Perfume Genie from a config entry."""
|
||||
# Initiate reauth for old config entries which don't have username / password in the entry data
|
||||
if CONF_EMAIL not in entry.data or CONF_PASSWORD not in entry.data:
|
||||
raise ConfigEntryAuthFailed("Missing credentials")
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
account = Account(session=session, account_hash=entry.data[ACCOUNT_HASH])
|
||||
|
||||
account = Account(
|
||||
email=entry.data[CONF_EMAIL],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
# Authenticate first so API token/cookies are available for subsequent calls
|
||||
await account.authenticate()
|
||||
account_devices = await account.get_devices()
|
||||
except aiohttp.ClientError as err:
|
||||
|
||||
except AuthenticationException as err:
|
||||
# Credentials invalid/expired -> raise AuthFailed to trigger reauth flow
|
||||
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug(
|
||||
"HTTP error during Rituals setup: status=%s, url=%s, headers=%s",
|
||||
err.status,
|
||||
err.request_info,
|
||||
dict(err.headers or {}),
|
||||
)
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
# Migrate old unique_ids to the new format
|
||||
@@ -45,7 +74,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
# Create a coordinator for each diffuser
|
||||
coordinators = {
|
||||
diffuser.hublot: RitualsDataUpdateCoordinator(
|
||||
hass, entry, diffuser, update_interval
|
||||
hass, entry, account, diffuser, update_interval
|
||||
)
|
||||
for diffuser in account_devices
|
||||
}
|
||||
@@ -106,3 +135,14 @@ def async_migrate_entities_unique_ids(
|
||||
registry_entry.entity_id,
|
||||
new_unique_id=f"{diffuser.hublot}-{new_unique_id}",
|
||||
)
|
||||
|
||||
|
||||
# Migration helpers for API v2
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Migrate config entry to version 2: drop legacy ACCOUNT_HASH and bump version."""
|
||||
if entry.version < 2:
|
||||
data = dict(entry.data)
|
||||
data.pop(ACCOUNT_HASH, None)
|
||||
hass.config_entries.async_update_entry(entry, data=data, version=2)
|
||||
return True
|
||||
return True
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from collections.abc import Mapping
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from aiohttp import ClientError
|
||||
from pyrituals import Account, AuthenticationException
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -13,9 +13,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import ACCOUNT_HASH, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .const import DOMAIN
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -28,39 +26,88 @@ DATA_SCHEMA = vol.Schema(
|
||||
class RitualsPerfumeGenieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Rituals Perfume Genie."""
|
||||
|
||||
VERSION = 1
|
||||
VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
|
||||
|
||||
errors = {}
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
account = Account(user_input[CONF_EMAIL], user_input[CONF_PASSWORD], session)
|
||||
|
||||
try:
|
||||
await account.authenticate()
|
||||
except ClientResponseError:
|
||||
_LOGGER.exception("Unexpected response")
|
||||
errors["base"] = "cannot_connect"
|
||||
except AuthenticationException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(account.email)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=account.email,
|
||||
data={ACCOUNT_HASH: account.account_hash},
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
session = async_get_clientsession(self.hass)
|
||||
account = Account(
|
||||
email=user_input[CONF_EMAIL],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
await account.authenticate()
|
||||
except AuthenticationException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except ClientError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_EMAIL])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_EMAIL],
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle re-authentication with Rituals."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Form to log in again."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert reauth_entry.unique_id is not None
|
||||
|
||||
if user_input:
|
||||
session = async_get_clientsession(self.hass)
|
||||
account = Account(
|
||||
email=reauth_entry.unique_id,
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
await account.authenticate()
|
||||
except AuthenticationException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except ClientError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data={
|
||||
CONF_EMAIL: reauth_entry.unique_id,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
reauth_entry.data,
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@ from datetime import timedelta

DOMAIN = "rituals_perfume_genie"

# Old (API V1)
ACCOUNT_HASH = "account_hash"

# The API provided by Rituals is currently rate limited to 30 requests
||||
|
||||
@@ -3,11 +3,13 @@
from datetime import timedelta
import logging

from pyrituals import Diffuser
from aiohttp import ClientError, ClientResponseError
from pyrituals import Account, AuthenticationException, Diffuser

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

@@ -23,10 +25,12 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        account: Account,
        diffuser: Diffuser,
        update_interval: timedelta,
    ) -> None:
        """Initialize global Rituals Perfume Genie data updater."""
        self.account = account
        self.diffuser = diffuser
        super().__init__(
            hass,
@@ -37,5 +41,36 @@ class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]):
        )

    async def _async_update_data(self) -> None:
        """Fetch data from Rituals."""
        await self.diffuser.update_data()
        """Fetch data from Rituals, with one silent re-auth on 401.

        If silent re-auth also fails, raise ConfigEntryAuthFailed to trigger reauth flow.
        Other HTTP/network errors are wrapped in UpdateFailed so HA can retry.
        """
        try:
            await self.diffuser.update_data()
        except (AuthenticationException, ClientResponseError) as err:
            # Treat 401/403 like AuthenticationException → one silent re-auth, single retry
            if isinstance(err, ClientResponseError) and (status := err.status) not in (
                401,
                403,
            ):
                # Non-auth HTTP error → let HA retry
                raise UpdateFailed(f"HTTP {status}") from err

            self.logger.debug(
                "Auth issue detected (%r). Attempting silent re-auth.", err
            )
            try:
                await self.account.authenticate()
                await self.diffuser.update_data()
            except AuthenticationException as err2:
                # Credentials invalid → trigger HA reauth
                raise ConfigEntryAuthFailed from err2
            except ClientResponseError as err2:
                # Still HTTP auth errors after refresh → trigger HA reauth
                if err2.status in (401, 403):
                    raise ConfigEntryAuthFailed from err2
                raise UpdateFailed(f"HTTP {err2.status}") from err2
        except ClientError as err:
            # Network issues (timeouts, DNS, etc.)
            raise UpdateFailed(f"Network error: {err!r}") from err
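The update path above boils down to a reusable idea: refresh credentials once on an auth error, retry the request a single time, and only escalate if that retry also fails. A minimal, hypothetical sketch of that pattern (names invented for illustration, not part of the integration):

```python
from collections.abc import Awaitable, Callable


class AuthError(Exception):
    """Stand-in for an authentication failure from the backend."""


async def fetch_with_single_reauth(
    fetch: Callable[[], Awaitable[dict]],
    reauthenticate: Callable[[], Awaitable[None]],
) -> dict:
    """Run fetch(); on an auth error re-authenticate once and retry exactly once."""
    try:
        return await fetch()
    except AuthError:
        await reauthenticate()  # silent re-auth; a second failure propagates to the caller
        return await fetch()
```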
||||
|
||||
@@ -1,10 +1,10 @@
{
  "domain": "rituals_perfume_genie",
  "name": "Rituals Perfume Genie",
  "codeowners": ["@milanmeu", "@frenck"],
  "codeowners": ["@milanmeu", "@frenck", "@quebulm"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
  "iot_class": "cloud_polling",
  "loggers": ["pyrituals"],
  "requirements": ["pyrituals==0.0.6"]
  "requirements": ["pyrituals==0.0.7"]
}
||||
|
||||
@@ -1,7 +1,8 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "reauth_successful": "Re-authentication was successful"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -9,6 +10,12 @@
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "description": "Please enter the correct password."
      },
      "user": {
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
||||
|
||||
@@ -17,14 +17,24 @@ from roborock import (
|
||||
from roborock.data import UserData
|
||||
from roborock.devices.device import RoborockDevice
|
||||
from roborock.devices.device_manager import UserParams, create_device_manager
|
||||
from roborock.map.map_parser import MapParserConfig
|
||||
|
||||
from homeassistant.const import CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_BASE_URL, CONF_USER_DATA, DOMAIN, PLATFORMS
|
||||
from .const import (
|
||||
CONF_BASE_URL,
|
||||
CONF_SHOW_BACKGROUND,
|
||||
CONF_USER_DATA,
|
||||
DEFAULT_DRAWABLES,
|
||||
DOMAIN,
|
||||
DRAWABLES,
|
||||
MAP_SCALE,
|
||||
PLATFORMS,
|
||||
)
|
||||
from .coordinator import (
|
||||
RoborockConfigEntry,
|
||||
RoborockCoordinators,
|
||||
@@ -56,6 +66,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
user_params,
|
||||
cache=cache,
|
||||
session=async_get_clientsession(hass),
|
||||
map_parser_config=MapParserConfig(
|
||||
drawables=[
|
||||
drawable
|
||||
for drawable, default_value in DEFAULT_DRAWABLES.items()
|
||||
if entry.options.get(DRAWABLES, {}).get(drawable, default_value)
|
||||
],
|
||||
show_background=entry.options.get(CONF_SHOW_BACKGROUND, False),
|
||||
map_scale=MAP_SCALE,
|
||||
),
|
||||
)
|
||||
except RoborockInvalidCredentials as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
@@ -80,10 +99,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="home_data_fail",
|
||||
) from err
|
||||
|
||||
async def shutdown_roborock(_: Event | None = None) -> None:
|
||||
await asyncio.gather(device_manager.close(), cache.flush())
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_roborock)
|
||||
)
|
||||
entry.async_on_unload(shutdown_roborock)
|
||||
|
||||
devices = await device_manager.get_devices()
|
||||
_LOGGER.debug("Device manager found %d devices", len(devices))
|
||||
for device in devices:
|
||||
entry.async_on_unload(device.close)
|
||||
|
||||
coordinators = await asyncio.gather(
|
||||
*build_setup_functions(hass, entry, devices, user_data),
|
||||
@@ -105,25 +131,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_coordinators",
|
||||
)
|
||||
valid_coordinators = RoborockCoordinators(v1_coords, a01_coords)
|
||||
|
||||
async def on_stop(_: Any) -> None:
|
||||
_LOGGER.debug("Shutting down roborock")
|
||||
await asyncio.gather(
|
||||
*(
|
||||
coordinator.async_shutdown()
|
||||
for coordinator in valid_coordinators.values()
|
||||
),
|
||||
cache.flush(),
|
||||
)
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
on_stop,
|
||||
)
|
||||
)
|
||||
entry.runtime_data = valid_coordinators
|
||||
entry.runtime_data = RoborockCoordinators(v1_coords, a01_coords)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -394,7 +394,14 @@ class RoborockWashingMachineUpdateCoordinator(
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[RoborockZeoProtocol, StateType]:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
try:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug("Failed to update washing machine data: %s", ex)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
|
||||
|
||||
class RoborockWetDryVacUpdateCoordinator(
|
||||
@@ -425,4 +432,11 @@ class RoborockWetDryVacUpdateCoordinator(
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[RoborockDyadDataProtocol, StateType]:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
try:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug("Failed to update wet dry vac data: %s", ex)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
|
||||
@@ -20,7 +20,7 @@
  "loggers": ["roborock"],
  "quality_scale": "silver",
  "requirements": [
    "python-roborock==3.9.3",
    "python-roborock==3.12.2",
    "vacuum-map-parser-roborock==0.1.4"
  ]
}
||||
|
||||
@@ -7,7 +7,7 @@ from datetime import time
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from roborock.data import DnDTimer
|
||||
from roborock.data import DnDTimer, ValleyElectricityTimer
|
||||
from roborock.exceptions import RoborockException
|
||||
|
||||
from homeassistant.components.time import TimeEntity, TimeEntityDescription
|
||||
@@ -80,13 +80,14 @@ TIME_DESCRIPTIONS: list[RoborockTimeDescription] = [
|
||||
key="off_peak_start",
|
||||
translation_key="off_peak_start",
|
||||
trait=lambda api: api.valley_electricity_timer,
|
||||
update_value=lambda trait, desired_time: trait.update_value(
|
||||
[
|
||||
desired_time.hour,
|
||||
desired_time.minute,
|
||||
trait.end_hour,
|
||||
trait.end_minute,
|
||||
]
|
||||
update_value=lambda trait, desired_time: trait.set_timer(
|
||||
ValleyElectricityTimer(
|
||||
enabled=trait.enabled,
|
||||
start_hour=desired_time.hour,
|
||||
start_minute=desired_time.minute,
|
||||
end_hour=trait.end_hour,
|
||||
end_minute=trait.end_minute,
|
||||
)
|
||||
),
|
||||
get_value=lambda trait: datetime.time(
|
||||
hour=trait.start_hour, minute=trait.start_minute
|
||||
@@ -98,13 +99,14 @@ TIME_DESCRIPTIONS: list[RoborockTimeDescription] = [
|
||||
key="off_peak_end",
|
||||
translation_key="off_peak_end",
|
||||
trait=lambda api: api.valley_electricity_timer,
|
||||
update_value=lambda trait, desired_time: trait.update_value(
|
||||
[
|
||||
trait.start_hour,
|
||||
trait.start_minute,
|
||||
desired_time.hour,
|
||||
desired_time.minute,
|
||||
]
|
||||
update_value=lambda trait, desired_time: trait.set_timer(
|
||||
ValleyElectricityTimer(
|
||||
enabled=trait.enabled,
|
||||
start_hour=trait.start_hour,
|
||||
start_minute=trait.start_minute,
|
||||
end_hour=desired_time.hour,
|
||||
end_minute=desired_time.minute,
|
||||
)
|
||||
),
|
||||
get_value=lambda trait: datetime.time(
|
||||
hour=trait.end_hour, minute=trait.end_minute
|
||||
|
||||
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sharkiq",
  "iot_class": "cloud_polling",
  "loggers": ["sharkiq"],
  "requirements": ["sharkiq==1.4.2"]
  "requirements": ["sharkiq==1.5.0"]
}
||||
|
||||
@@ -170,6 +170,9 @@ async def _async_setup_block_entry(
|
||||
device_entry = dev_reg.async_get_device(
|
||||
connections={(CONNECTION_NETWORK_MAC, dr.format_mac(entry.unique_id))},
|
||||
)
|
||||
# https://github.com/home-assistant/core/pull/48076
|
||||
if device_entry and entry.entry_id not in device_entry.config_entries:
|
||||
device_entry = None
|
||||
|
||||
sleep_period = entry.data.get(CONF_SLEEP_PERIOD)
|
||||
runtime_data = entry.runtime_data
|
||||
@@ -280,6 +283,9 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
|
||||
device_entry = dev_reg.async_get_device(
|
||||
connections={(CONNECTION_NETWORK_MAC, dr.format_mac(entry.unique_id))},
|
||||
)
|
||||
# https://github.com/home-assistant/core/pull/48076
|
||||
if device_entry and entry.entry_id not in device_entry.config_entries:
|
||||
device_entry = None
|
||||
|
||||
sleep_period = entry.data.get(CONF_SLEEP_PERIOD)
|
||||
runtime_data = entry.runtime_data
|
||||
|
||||
@@ -44,6 +44,7 @@ from .entity import (
|
||||
)
|
||||
from .utils import (
|
||||
async_remove_orphaned_entities,
|
||||
async_remove_shelly_entity,
|
||||
format_ble_addr,
|
||||
get_blu_trv_device_info,
|
||||
get_device_entry_gen,
|
||||
@@ -80,6 +81,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action="trigger_reboot",
|
||||
supported=lambda coordinator: coordinator.sleep_period == 0,
|
||||
),
|
||||
ShellyButtonDescription[ShellyBlockCoordinator](
|
||||
key="self_test",
|
||||
@@ -197,7 +199,8 @@ async def async_setup_entry(
|
||||
"""Set up button entities."""
|
||||
entry_data = config_entry.runtime_data
|
||||
coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator | None
|
||||
if get_device_entry_gen(config_entry) in RPC_GENERATIONS:
|
||||
device_gen = get_device_entry_gen(config_entry)
|
||||
if device_gen in RPC_GENERATIONS:
|
||||
coordinator = entry_data.rpc
|
||||
else:
|
||||
coordinator = entry_data.block
|
||||
@@ -210,6 +213,12 @@ async def async_setup_entry(
|
||||
hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
|
||||
)
|
||||
|
||||
# Remove the 'restart' button for sleeping devices as it was mistakenly
|
||||
# added in https://github.com/home-assistant/core/pull/154673
|
||||
entry_sleep_period = config_entry.data[CONF_SLEEP_PERIOD]
|
||||
if device_gen in RPC_GENERATIONS and entry_sleep_period:
|
||||
async_remove_shelly_entity(hass, BUTTON_PLATFORM, f"{coordinator.mac}-reboot")
|
||||
|
||||
entities: list[ShellyButton] = []
|
||||
|
||||
entities.extend(
|
||||
@@ -224,7 +233,7 @@ async def async_setup_entry(
|
||||
return
|
||||
|
||||
# add RPC buttons
|
||||
if config_entry.data[CONF_SLEEP_PERIOD]:
|
||||
if entry_sleep_period:
|
||||
async_setup_entry_rpc(
|
||||
hass,
|
||||
config_entry,
|
||||
|
||||
@@ -31,5 +31,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "quality_scale": "bronze",
  "requirements": ["pysmartthings==3.5.0"]
  "requirements": ["pysmartthings==3.5.1"]
}
||||
|
||||
@@ -72,7 +72,6 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]):
    def _get_starlink_data(self) -> StarlinkData:
        """Retrieve Starlink data."""
        context = self.channel_context
        status = status_data(context)
        location = location_data(context)
        sleep = get_sleep_config(context)
        status, obstruction, alert = status_data(context)
||||
|
||||
@@ -28,6 +28,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import now
from homeassistant.util.variance import ignore_variance

from .coordinator import StarlinkConfigEntry, StarlinkData
from .entity import StarlinkEntity
@@ -91,6 +92,10 @@ class StarlinkAccumulationSensor(StarlinkSensorEntity, RestoreSensor):
        self._attr_native_value = last_native_value


uptime_to_stable_datetime = ignore_variance(
    lambda value: now() - timedelta(seconds=value), timedelta(minutes=1)
)

SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
    StarlinkSensorEntityDescription(
        key="ping",
@@ -150,9 +155,7 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
        translation_key="last_restart",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: (
            now() - timedelta(seconds=data.status["uptime"], milliseconds=-500)
        ).replace(microsecond=0),
        value_fn=lambda data: uptime_to_stable_datetime(data.status["uptime"]),
        entity_class=StarlinkSensorEntity,
    ),
    StarlinkSensorEntityDescription(
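Recomputing `now() - uptime` on every poll yields a slightly different restart timestamp each cycle, which the old code masked with a half-second offset and `replace(microsecond=0)`. The `ignore_variance` wrapper instead keeps returning the previously computed value while the fresh one only drifts within the allowed window. A generic sketch of that behaviour (assumed semantics, not the Home Assistant utility itself):

```python
from datetime import datetime, timedelta
from typing import Callable


def ignore_small_drift(
    func: Callable[[float], datetime], tolerance: timedelta
) -> Callable[[float], datetime]:
    """Wrap func so its result only changes when it moves by more than `tolerance`."""
    last: datetime | None = None

    def wrapper(value: float) -> datetime:
        nonlocal last
        new = func(value)
        if last is None or abs(new - last) > tolerance:
            last = new
        return last

    return wrapper
```

Wrapping the uptime-to-datetime conversion this way keeps the `last_restart` value stable even though the reported uptime ticks up by a few seconds between polls.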
||||
|
||||
@@ -147,16 +147,16 @@ class SwitchBotCloudBinarySensor(SwitchBotCloudEntity, BinarySensorEntity):
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{device.device_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
def _set_attributes(self) -> None:
|
||||
"""Set attributes from coordinator data."""
|
||||
if not self.coordinator.data:
|
||||
return None
|
||||
return
|
||||
|
||||
if self.entity_description.value_fn:
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
||||
self._attr_is_on = self.entity_description.value_fn(self.coordinator.data)
|
||||
return
|
||||
|
||||
return (
|
||||
self._attr_is_on = (
|
||||
self.coordinator.data.get(self.entity_description.key)
|
||||
== self.entity_description.on_value
|
||||
)
|
||||
|
||||
@@ -422,6 +422,9 @@ async def async_setup_entry(
|
||||
},
|
||||
),
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
description_placeholders={
|
||||
"syntax_keys_documentation_url": "http://robotjs.io/docs/syntax#keys"
|
||||
},
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DATA_WAIT_TIMEOUT, DOMAIN, SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
from .const import DATA_WAIT_TIMEOUT, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -134,9 +134,6 @@ class SystemBridgeConfigFlow(
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
description_placeholders={
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
},
|
||||
)
|
||||
|
||||
errors, info = await _async_get_info(self.hass, user_input)
|
||||
@@ -151,9 +148,6 @@ class SystemBridgeConfigFlow(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_authenticate(
|
||||
@@ -185,7 +179,6 @@ class SystemBridgeConfigFlow(
|
||||
data_schema=STEP_AUTHENTICATE_DATA_SCHEMA,
|
||||
description_placeholders={
|
||||
"name": self._name,
|
||||
"syntax_keys_documentation_url": SYNTAX_KEYS_DOCUMENTATION_URL,
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -4,8 +4,6 @@ from typing import Final
|
||||
|
||||
from systembridgemodels.modules import Module
|
||||
|
||||
SYNTAX_KEYS_DOCUMENTATION_URL = "http://robotjs.io/docs/syntax#keys"
|
||||
|
||||
DOMAIN = "system_bridge"
|
||||
|
||||
MODULES: Final[list[Module]] = [
|
||||
|
||||
@@ -524,6 +524,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async_send_telegram_message,
|
||||
schema=schema,
|
||||
supports_response=supports_response,
|
||||
description_placeholders={
|
||||
"formatting_options_url": "https://core.telegram.org/bots/api#formatting-options"
|
||||
},
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -64,6 +64,12 @@ from .const import (
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DESCRIPTION_PLACEHOLDERS: dict[str, str] = {
|
||||
"botfather_username": "@BotFather",
|
||||
"botfather_url": "https://t.me/botfather",
|
||||
"socks_url": "socks5://username:password@proxy_ip:proxy_port",
|
||||
}
|
||||
|
||||
STEP_USER_DATA_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): SelectSelector(
|
||||
@@ -310,10 +316,7 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow to create a new config entry for a Telegram bot."""
|
||||
|
||||
description_placeholders: dict[str, str] = {
|
||||
"botfather_username": "@BotFather",
|
||||
"botfather_url": "https://t.me/botfather",
|
||||
}
|
||||
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
@@ -552,13 +555,14 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
},
|
||||
),
|
||||
description_placeholders=DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
|
||||
CONF_PROXY_URL
|
||||
)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = DESCRIPTION_PLACEHOLDERS.copy()
|
||||
|
||||
user_input[CONF_API_KEY] = api_key
|
||||
bot_name = await self._validate_bot(
|
||||
|
||||
@@ -60,7 +60,7 @@
|
||||
"proxy_url": "Proxy URL"
|
||||
},
|
||||
"data_description": {
|
||||
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n(socks5://username:password@proxy_ip:proxy_port)"
|
||||
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n({socks_url})"
|
||||
},
|
||||
"name": "Advanced settings"
|
||||
}
|
||||
@@ -400,7 +400,7 @@
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the media.",
|
||||
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
|
||||
},
|
||||
"chat_id": {
|
||||
@@ -499,7 +499,7 @@
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the animation.",
|
||||
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
|
||||
},
|
||||
"config_entry_id": {
|
||||
@@ -600,7 +600,7 @@
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the document.",
|
||||
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
|
||||
},
|
||||
"config_entry_id": {
|
||||
@@ -745,7 +745,7 @@
|
||||
"name": "Keyboard"
|
||||
},
|
||||
"message": {
|
||||
"description": "Message body of the notification.",
|
||||
"description": "Message body of the notification.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
|
||||
"name": "Message"
|
||||
},
|
||||
"message_tag": {
|
||||
@@ -757,7 +757,7 @@
|
||||
"name": "Message thread ID"
|
||||
},
|
||||
"parse_mode": {
|
||||
"description": "Parser for the message text.",
|
||||
"description": "Parser for the message text.\nSee [formatting options]({formatting_options_url}) for more details.",
|
||||
"name": "Parse mode"
|
||||
},
|
||||
"reply_to_message_id": {
|
||||
@@ -787,7 +787,7 @@
|
||||
"name": "Authentication method"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the image.",
|
||||
"description": "The title of the media.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
|
||||
"name": "Caption"
|
||||
},
|
||||
"config_entry_id": {
|
||||
@@ -991,7 +991,7 @@
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the video.",
|
||||
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
|
||||
},
|
||||
"config_entry_id": {
|
||||
@@ -1070,7 +1070,7 @@
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::authentication::name%]"
|
||||
},
|
||||
"caption": {
|
||||
"description": "The title of the voice message.",
|
||||
"description": "[%key:component::telegram_bot::services::send_photo::fields::caption::description%]",
|
||||
"name": "[%key:component::telegram_bot::services::send_photo::fields::caption::name%]"
|
||||
},
|
||||
"config_entry_id": {
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
import itertools
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -346,12 +347,21 @@ async def async_validate_config_section(
|
||||
|
||||
async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> ConfigType:
|
||||
"""Validate config."""
|
||||
if DOMAIN not in config:
|
||||
|
||||
configs = []
|
||||
for key in config:
|
||||
if DOMAIN not in key:
|
||||
continue
|
||||
|
||||
if key == DOMAIN or (key.startswith(DOMAIN) and len(key.split()) > 1):
|
||||
configs.append(cv.ensure_list(config[key]))
|
||||
|
||||
if not configs:
|
||||
return config
|
||||
|
||||
config_sections = []
|
||||
|
||||
for cfg in cv.ensure_list(config[DOMAIN]):
|
||||
for cfg in itertools.chain(*configs):
|
||||
try:
|
||||
template_config: TemplateConfig = await async_validate_config_section(
|
||||
hass, cfg
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Helpers for template integration."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from enum import Enum
|
||||
from enum import StrEnum
|
||||
import hashlib
|
||||
import itertools
|
||||
import logging
|
||||
@@ -12,6 +12,7 @@ import voluptuous as vol
|
||||
from homeassistant.components import blueprint
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_ENTITY_PICTURE_TEMPLATE,
|
||||
CONF_FRIENDLY_NAME,
|
||||
CONF_ICON,
|
||||
@@ -33,6 +34,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
async_get_platforms,
|
||||
)
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity
|
||||
from homeassistant.helpers.script_variables import ScriptVariables
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import yaml as yaml_util
|
||||
@@ -132,6 +134,9 @@ def rewrite_legacy_to_modern_config(
|
||||
"""Rewrite legacy config."""
|
||||
entity_cfg = {**entity_cfg}
|
||||
|
||||
# Remove deprecated entity_id field from legacy syntax
|
||||
entity_cfg.pop(ATTR_ENTITY_ID, None)
|
||||
|
||||
for from_key, to_key in itertools.chain(
|
||||
LEGACY_FIELDS.items(), extra_legacy_fields.items()
|
||||
):
|
||||
@@ -190,12 +195,12 @@ def async_create_template_tracking_entities(
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
def _format_template(value: Any) -> Any:
|
||||
def _format_template(value: Any, field: str | None = None) -> Any:
|
||||
if isinstance(value, template.Template):
|
||||
return value.template
|
||||
|
||||
if isinstance(value, Enum):
|
||||
return value.name
|
||||
if isinstance(value, StrEnum):
|
||||
return value.value
|
||||
|
||||
if isinstance(value, (int, float, str, bool)):
|
||||
return value
|
||||
@@ -207,14 +212,13 @@ def format_migration_config(
|
||||
config: ConfigType | list[ConfigType], depth: int = 0
|
||||
) -> ConfigType | list[ConfigType]:
|
||||
"""Recursive method to format templates as strings from ConfigType."""
|
||||
types = (dict, list)
|
||||
if depth > 9:
|
||||
raise RecursionError
|
||||
|
||||
if isinstance(config, list):
|
||||
items = []
|
||||
for item in config:
|
||||
if isinstance(item, types):
|
||||
if isinstance(item, (dict, list)):
|
||||
if len(item) > 0:
|
||||
items.append(format_migration_config(item, depth + 1))
|
||||
else:
|
||||
@@ -223,9 +227,18 @@ def format_migration_config(
|
||||
|
||||
formatted_config = {}
|
||||
for field, value in config.items():
|
||||
if isinstance(value, types):
|
||||
if isinstance(value, dict):
|
||||
if len(value) > 0:
|
||||
formatted_config[field] = format_migration_config(value, depth + 1)
|
||||
elif isinstance(value, list):
|
||||
if len(value) > 0:
|
||||
formatted_config[field] = format_migration_config(value, depth + 1)
|
||||
else:
|
||||
formatted_config[field] = []
|
||||
elif isinstance(value, ScriptVariables):
|
||||
formatted_config[field] = format_migration_config(
|
||||
value.as_dict(), depth + 1
|
||||
)
|
||||
else:
|
||||
formatted_config[field] = _format_template(value)
|
||||
|
||||
@@ -260,9 +273,9 @@ def create_legacy_template_issue(
|
||||
try:
|
||||
config.pop(CONF_PLATFORM, None)
|
||||
modified_yaml = format_migration_config(config)
|
||||
yaml_config = yaml_util.dump({DOMAIN: [{domain: [modified_yaml]}]})
|
||||
# Format to show up properly in a numbered bullet on the repair.
|
||||
yaml_config = " ```\n " + yaml_config.replace("\n", "\n ") + "```"
|
||||
yaml_config = (
|
||||
f"```\n{yaml_util.dump({DOMAIN: [{domain: [modified_yaml]}]})}\n```"
|
||||
)
|
||||
except RecursionError:
|
||||
yaml_config = f"{DOMAIN}:\n - {domain}: - ..."
|
||||
|
||||
@@ -278,6 +291,7 @@ def create_legacy_template_issue(
|
||||
"domain": domain,
|
||||
"breadcrumb": breadcrumb,
|
||||
"config": yaml_config,
|
||||
"filename": "<filename>",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -635,14 +635,14 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
|
||||
# Support legacy mireds in template light.
|
||||
temperature = int(render)
|
||||
if (min_kelvin := self._attr_min_color_temp_kelvin) is not None:
|
||||
min_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
|
||||
else:
|
||||
min_mireds = DEFAULT_MIN_MIREDS
|
||||
|
||||
if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
|
||||
max_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
|
||||
max_mireds = color_util.color_temperature_kelvin_to_mired(min_kelvin)
|
||||
else:
|
||||
max_mireds = DEFAULT_MAX_MIREDS
|
||||
|
||||
if (max_kelvin := self._attr_max_color_temp_kelvin) is not None:
|
||||
min_mireds = color_util.color_temperature_kelvin_to_mired(max_kelvin)
|
||||
else:
|
||||
min_mireds = DEFAULT_MIN_MIREDS
|
||||
if min_mireds <= temperature <= max_mireds:
|
||||
self._attr_color_temp_kelvin = (
|
||||
color_util.color_temperature_mired_to_kelvin(temperature)
|
||||
@@ -856,42 +856,36 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
|
||||
|
||||
try:
|
||||
if render in (None, "None", ""):
|
||||
self._attr_max_mireds = DEFAULT_MAX_MIREDS
|
||||
self._attr_max_color_temp_kelvin = None
|
||||
self._attr_min_color_temp_kelvin = None
|
||||
return
|
||||
|
||||
self._attr_max_mireds = max_mireds = int(render)
|
||||
self._attr_max_color_temp_kelvin = (
|
||||
color_util.color_temperature_mired_to_kelvin(max_mireds)
|
||||
self._attr_min_color_temp_kelvin = (
|
||||
color_util.color_temperature_mired_to_kelvin(int(render))
|
||||
)
|
||||
except ValueError:
|
||||
_LOGGER.exception(
|
||||
"Template must supply an integer temperature within the range for"
|
||||
" this light, or 'None'"
|
||||
)
|
||||
self._attr_max_mireds = DEFAULT_MAX_MIREDS
|
||||
self._attr_max_color_temp_kelvin = None
|
||||
self._attr_min_color_temp_kelvin = None
|
||||
|
||||
@callback
|
||||
def _update_min_mireds(self, render):
|
||||
"""Update the min mireds from the template."""
|
||||
try:
|
||||
if render in (None, "None", ""):
|
||||
self._attr_min_mireds = DEFAULT_MIN_MIREDS
|
||||
self._attr_min_color_temp_kelvin = None
|
||||
self._attr_max_color_temp_kelvin = None
|
||||
return
|
||||
|
||||
self._attr_min_mireds = min_mireds = int(render)
|
||||
self._attr_min_color_temp_kelvin = (
|
||||
color_util.color_temperature_mired_to_kelvin(min_mireds)
|
||||
self._attr_max_color_temp_kelvin = (
|
||||
color_util.color_temperature_mired_to_kelvin(int(render))
|
||||
)
|
||||
except ValueError:
|
||||
_LOGGER.exception(
|
||||
"Template must supply an integer temperature within the range for"
|
||||
" this light, or 'None'"
|
||||
)
|
||||
self._attr_min_mireds = DEFAULT_MIN_MIREDS
|
||||
self._attr_min_color_temp_kelvin = None
|
||||
self._attr_max_color_temp_kelvin = None
|
||||
|
||||
@callback
|
||||
def _update_supports_transition(self, render):
|
||||
|
||||
@@ -529,7 +529,7 @@
|
||||
"title": "Deprecated battery level option in {entity_name}"
|
||||
},
|
||||
"deprecated_legacy_templates": {
|
||||
"description": "The legacy `platform: template` syntax for `{domain}` is being removed. Please migrate `{breadcrumb}` to the modern template syntax.\n\n1. Remove existing template definition.\n2. Add new template definition:\n{config}\n3. Restart Home Assistant or reload template entities.",
|
||||
"description": "The legacy `platform: template` syntax for `{domain}` is being removed. Please migrate `{breadcrumb}` to the modern template syntax.\n#### Step 1 - Remove legacy configuration\nRemove the `{breadcrumb}` template definition from the `configuration.yaml` `{domain}:` section.\n\n**Note:** If you are using `{domain}: !include {filename}.yaml` in `configuration.yaml`, remove the {domain} definition from the included `{filename}.yaml`.\n#### Step 2 - Add the modern configuration\nAdd new template definition inside `configuration.yaml`:\n{config}\n**Note:** If there are any existing `template:` sections in your configuration, make sure to omit the `template:` line from the yaml above. There can only be 1 `template:` section in `configuration.yaml`. Also, ensure the indentation is aligned with the existing entities within the `template:` section.\n#### Step 3 - Restart Home Assistant or reload template entities",
|
||||
"title": "Legacy {domain} template deprecation"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -325,6 +325,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
vol.Required(ATTR_TOU_SETTINGS): dict,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"time_of_use_url": "https://developer.tesla.com/docs/fleet-api#time_of_use_settings"
|
||||
},
|
||||
)
|
||||
|
||||
async def add_charge_schedule(call: ServiceCall) -> None:
|
||||
|
||||
@@ -1358,7 +1358,7 @@
|
||||
"name": "Energy Site"
|
||||
},
|
||||
"tou_settings": {
|
||||
"description": "See https://developer.tesla.com/docs/fleet-api#time_of_use_settings for details.",
|
||||
"description": "See {time_of_use_url} for details.",
|
||||
"name": "Settings"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -125,6 +125,18 @@ def _get_temperature_wrappers(
|
||||
device, DPCode.TEMP_SET_F, prefer_function=True
|
||||
)
|
||||
|
||||
# If there is a temp unit convert dpcode, override empty units
|
||||
if (
|
||||
temp_unit_convert := DPCodeEnumWrapper.find_dpcode(
|
||||
device, DPCode.TEMP_UNIT_CONVERT
|
||||
)
|
||||
) is not None:
|
||||
for wrapper in (temp_current, temp_current_f, temp_set, temp_set_f):
|
||||
if wrapper is not None and not wrapper.type_information.unit:
|
||||
wrapper.type_information.unit = temp_unit_convert.read_device_status(
|
||||
device
|
||||
)
|
||||
|
||||
# Get wrappers for celsius and fahrenheit
|
||||
# We need to check the unit of measurement
|
||||
current_celsius = _get_temperature_wrapper(
|
||||
@@ -421,23 +433,23 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
|
||||
return self._read_wrapper(self._target_humidity_wrapper)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return hvac mode."""
|
||||
# If the switch is off, hvac mode is off as well.
|
||||
# Unless the switch doesn't exists of course...
|
||||
# If the switch is off, hvac mode is off.
|
||||
switch_status: bool | None
|
||||
if (switch_status := self._read_wrapper(self._switch_wrapper)) is False:
|
||||
return HVACMode.OFF
|
||||
|
||||
# If the mode is known and maps to an HVAC mode, return it.
|
||||
if (mode := self._read_wrapper(self._hvac_mode_wrapper)) and (
|
||||
hvac_mode := TUYA_HVAC_TO_HA.get(mode)
|
||||
):
|
||||
return hvac_mode
|
||||
# If we don't have a mode wrapper, return switch only mode.
|
||||
if self._hvac_mode_wrapper is None:
|
||||
if switch_status is True:
|
||||
return self.entity_description.switch_only_hvac_mode
|
||||
return None
|
||||
|
||||
# If hvac_mode is unknown, return the switch only mode.
|
||||
if switch_status:
|
||||
return self.entity_description.switch_only_hvac_mode
|
||||
return HVACMode.OFF
|
||||
# If we do have a mode wrapper, check if the mode maps to an HVAC mode.
|
||||
if (hvac_status := self._read_wrapper(self._hvac_mode_wrapper)) is None:
|
||||
return None
|
||||
return TUYA_HVAC_TO_HA.get(hvac_status)
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
|
||||
@@ -98,6 +98,7 @@ def _async_device_as_dict(
|
||||
"home_assistant": {},
|
||||
"set_up": device.set_up,
|
||||
"support_local": device.support_local,
|
||||
"local_strategy": device.local_strategy,
|
||||
"warnings": DEVICE_WARNINGS.get(device.id),
|
||||
}
|
||||
|
||||
|
||||
@@ -127,12 +127,12 @@ class BitmapTypeInformation(TypeInformation):
|
||||
@classmethod
|
||||
def from_json(cls, dpcode: str, type_data: str) -> Self | None:
|
||||
"""Load JSON string and return a BitmapTypeInformation object."""
|
||||
if not (parsed := json_loads_object(type_data)):
|
||||
if not (parsed := cast(dict[str, Any] | None, json_loads_object(type_data))):
|
||||
return None
|
||||
return cls(
|
||||
dpcode=dpcode,
|
||||
type_data=type_data,
|
||||
**cast(dict[str, list[str]], parsed),
|
||||
label=parsed["label"],
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["uiprotect", "unifi_discovery"],
  "requirements": ["uiprotect==7.31.0", "unifi-discovery==1.2.0"],
  "requirements": ["uiprotect==7.33.2", "unifi-discovery==1.2.0"],
  "ssdp": [
    {
      "manufacturer": "Ubiquiti Networks",
||||
|
||||
@@ -66,7 +66,7 @@ async def async_setup_entry(
    @callback
    def discover(devices):
        """Add new devices to platform."""
        _setup_entities(devices, async_add_entities)
        _setup_entities(devices, async_add_entities, coordinator)

    config_entry.async_on_unload(
        async_dispatcher_connect(hass, VS_DISCOVERY.format(VS_DEVICES), discover)
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from homeassistant.util.json import json_loads

from . import (
    DEFAULT_METHODS,
@@ -62,7 +63,9 @@ async def _handle_webhook(
    base_result: dict[str, Any] = {"platform": "webhook", "webhook_id": webhook_id}

    if "json" in request.headers.get(hdrs.CONTENT_TYPE, ""):
        base_result["json"] = await request.json()
        # Always attempt to read the body; request.text() returns "" if empty
        text = await request.text()
        base_result["json"] = json_loads(text) if text else {}
    else:
        base_result["data"] = await request.post()
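The guard matters because parsing an empty body as JSON raises even when the request advertises a JSON content type; reading the raw text first lets the trigger fall back to an empty object. A minimal generic illustration (plain `json` from the standard library, not the trigger code itself):

```python
import json


async def read_json_body(request) -> dict:
    """Return the parsed JSON body, or {} when the client sent no payload."""
    text = await request.text()  # aiohttp-style: "" for an empty body
    return json.loads(text) if text else {}
```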
||||
|
||||
|
||||
@@ -19,6 +19,8 @@ from pythonxbox.api.provider.smartglass.models import (

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType

from .entity import to_https


class MediaTypeDetails(NamedTuple):
    """Details for media type."""
@@ -151,5 +153,5 @@ def _find_media_image(images: list[Image]) -> str | None:
    if match := next(
        (image for image in images if image.image_purpose == purpose), None
    ):
        return f"https:{match.uri}" if match.uri.startswith("/") else match.uri
        return to_https(match.uri)
    return None
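The inline expression being replaced only handled protocol-relative image URIs by prefixing the scheme; `to_https` (defined in `entity.py`, not shown in this diff) centralizes that normalization. A hypothetical sketch of such a helper, purely for illustration:

```python
def to_https(uri: str) -> str:
    """Normalize an image URI to an https URL (illustrative only; the real helper may differ)."""
    if uri.startswith("//"):  # protocol-relative, e.g. //images.example.com/cover.png
        return f"https:{uri}"
    if uri.startswith("http://"):  # upgrade plain http
        return "https://" + uri[len("http://"):]
    return uri
```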
||||
|
||||
Some files were not shown because too many files have changed in this diff.