Mirror of https://github.com/home-assistant/core.git (synced 2025-10-19 16:49:30 +00:00)

Compare commits: trigger_ac ... 2025.10.3 (142 commits)
Commits (SHA1):

bb98ed6633 59dace572a 735cf36a5b 90b0f50b8f e731c07b77 2c75635e95 1f031695c2
fb279212a9 45869523d0 a753926f22 dc874ff53a 3ef6865708 7f1989f9f2 97e338c760
101679c17d bc784c356e 556cc57d8b eef6e96a93 56d237af7f e5d1902d2a a9a203678e
7f6237cc63 5468e691ca 67cbbc3522 504da54c11 cdda2ef5c8 f405f9eb4b 634f71835a
49bfb01fac ad8f7fdcab f82ec81062 03b0842a01 13e5cb5cc8 f18cdaf4d8 5b3bca1426
d812e9d43c fa1071b221 e48c2c6c0b bddd4100c0 70d8df2e95 08b3dd0173 6723a7c4e1
40d7f2a89e 13b717e2da 5fcfd3ad84 324a7b5443 491ae8f72c 259247892f caeda0ef64
df35c535e4 f93b9e0ed0 48a3372cf2 d84fd72428 e8cb386962 5ac726703c 688649a799
c5359ade3e 4e60dedc1b 221d74f83a fbbb3d6415 8297019011 61715dcff3 32b822ee99
e6c2e0ad80 1314427dc5 bf499a45f7 b955e22628 1b222ff5fd f0510e703f cbe3956e15
4588e9da8d 5445890fdf 9b49f77f86 566c8fb786 b36150c213 809070d2ad f4339dc031
f3b37d24b0 4c8348caa7 b9e7c102ea 69d9fa89b7 6f3f5a5ec1 5ecfeca90a 00e0570fd4
5a5b94f3af 34f00d9b33 4cabc5b368 4045125422 d7393af76f ad41386b27 62d17ea20c
c4954731d0 647723d3f0 51c500e22c f6fc13c1f2 0009a7a042 a3d1aa28e7 9f53eb9b76
f53a205ff3 d08517c3df d7398a44a1 9acfc0cb88 1b3d21523a 1d407d1326 013346cead
5abaabc9da 32481312c3 bdc9eb37d3 e0afcbc02b cd56a6a98d 9d85893bbb 9e8a70225f
96ec795d5e 65b796070d 32994812e5 66ff9d63a3 b2a63d4996 f9f37b7f2a 7bdd9dd38a
1e8aae0a89 cf668e9dc2 2e91c8700f 9d14627daa 73b8283748 edeaaa2e63 d26dd8fc39
34640ea735 46a2e21ef0 508af53e72 5f7440608c 0d1aa38a26 929f8c148a 92db1f5a04
e66b5ce0bf 1e17150e9f 792902de3d 04d78c3dd5 5c8d5bfb84 99bff31869 d949119fb0
e7b737ece5 fb8ddac2e8
CODEOWNERS (generated, 4 changes)

@@ -760,8 +760,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @MaestroOnICe
/tests/components/iometer/ @MaestroOnICe
/homeassistant/components/iometer/ @jukrebs
/tests/components/iometer/ @jukrebs
/homeassistant/components/ios/ @robbiet480
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
build.yaml (10 changes)

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
@@ -1,6 +1,9 @@
{
"entity": {
"sensor": {
"air_quality": {
"default": "mdi:air-filter"
},
"cloud_ceiling": {
"default": "mdi:weather-fog"
},
@@ -34,9 +37,6 @@
"thunderstorm_probability_night": {
"default": "mdi:weather-lightning"
},
"translation_key": {
"default": "mdi:air-filter"
},
"tree_pollen": {
"default": "mdi:tree-outline"
},
@@ -1,7 +1,9 @@
"""Airgradient Update platform."""

from datetime import timedelta
import logging

from airgradient import AirGradientConnectionError
from propcache.api import cached_property

from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -13,6 +15,7 @@ from .entity import AirGradientEntity

PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(hours=1)
_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
@@ -31,6 +34,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Representation of Airgradient Update."""

_attr_device_class = UpdateDeviceClass.FIRMWARE
_server_unreachable_logged = False

def __init__(self, coordinator: AirGradientCoordinator) -> None:
"""Initialize the entity."""
@@ -47,10 +51,27 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Return the installed version of the entity."""
return self.coordinator.data.measures.firmware_version

@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._attr_available

async def async_update(self) -> None:
"""Update the entity."""
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
try:
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
)
)
except AirGradientConnectionError:
self._attr_latest_version = None
self._attr_available = False
if not self._server_unreachable_logged:
_LOGGER.error(
"Unable to connect to AirGradient server to check for updates"
)
self._server_unreachable_logged = True
else:
self._server_unreachable_logged = False
self._attr_available = True
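The hunk above wraps the firmware-version lookup in a try/except and drives entity availability from the result. A minimal standalone sketch of the same pattern, with a hypothetical `client` object rather than the integration's real classes:

```python
import logging

_LOGGER = logging.getLogger(__name__)


class FirmwareUpdateSketch:
    """Sketch: mark the entity unavailable when the update server is unreachable."""

    def __init__(self, client) -> None:
        self._client = client  # hypothetical client exposing get_latest_firmware_version()
        self._attr_available = True
        self._attr_latest_version: str | None = None
        self._server_unreachable_logged = False

    async def async_update(self) -> None:
        try:
            self._attr_latest_version = await self._client.get_latest_firmware_version()
        except ConnectionError:  # stand-in for AirGradientConnectionError
            self._attr_latest_version = None
            self._attr_available = False
            if not self._server_unreachable_logged:
                _LOGGER.error("Unable to reach the update server")
                self._server_unreachable_logged = True
        else:
            # Recovered: clear the one-shot log flag and restore availability.
            self._server_unreachable_logged = False
            self._attr_available = True
```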
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.1"]
"requirements": ["airos==0.5.5"]
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairq"],
"requirements": ["aioairq==0.4.6"]
"requirements": ["aioairq==0.4.7"]
}
@@ -2,17 +2,14 @@

from airtouch4pyapi import AirTouch

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .coordinator import AirtouchDataUpdateCoordinator
from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator

PLATFORMS = [Platform.CLIMATE]

type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
"""Set up AirTouch4 from a config entry."""
@@ -22,7 +19,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
info = airtouch.GetAcs()
if not info:
raise ConfigEntryNotReady
coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

@@ -2,26 +2,34 @@

import logging

from airtouch4pyapi import AirTouch
from airtouch4pyapi.airtouch import AirTouchStatus

from homeassistant.components.climate import SCAN_INTERVAL
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]


class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Airtouch data."""

def __init__(self, hass, airtouch):
def __init__(
self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
) -> None:
"""Initialize global Airtouch data updater."""
self.airtouch = airtouch

super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
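Several of the commits above pass the config entry into `DataUpdateCoordinator` so the coordinator is bound to the entry that owns it. A hedged sketch of that constructor shape; the `SCAN_INTERVAL`, name, and client are illustrative, not AirTouch's actual values:

```python
from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=1)  # illustrative polling interval


class ExampleCoordinator(DataUpdateCoordinator):
    """Sketch of a coordinator that is tied to its config entry."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry, client) -> None:
        """Initialize the coordinator, handing the owning entry to the base class."""
        self.client = client  # hypothetical API client
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,  # ties coordinator lifetime to the entry
            name="example",
            update_interval=SCAN_INTERVAL,
        )
```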
@@ -18,7 +18,9 @@ from homeassistant.components.binary_sensor import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er

from .const import _LOGGER, DOMAIN
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
@@ -51,11 +53,47 @@ BINARY_SENSORS: Final = (
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
),
),
)

DEPRECATED_BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: False,
),
)


async def async_setup_entry(
hass: HomeAssistant,
@@ -66,6 +104,8 @@ async def async_setup_entry(

coordinator = entry.runtime_data

entity_registry = er.async_get(hass)

# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
@@ -75,6 +115,16 @@ async def async_setup_entry(
"detectionState",
)

# Clean up deprecated sensors
for sensor_desc in DEPRECATED_BINARY_SENSORS:
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{sensor_desc.key}"
if entity_id := entity_registry.async_get_entity_id(
BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
):
_LOGGER.debug("Removing deprecated entity %s", entity_id)
entity_registry.async_remove(entity_id)

known_devices: set[str] = set()

def _check_device() -> None:
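The availability lambdas above replace direct indexing of `device.sensors[key]` with a guarded lookup using the walrus operator, so a missing key no longer raises. A small self-contained illustration of the same guard, using plain dataclasses instead of the integration's real types:

```python
from dataclasses import dataclass, field


@dataclass
class Sensor:
    error: bool = False


@dataclass
class Device:
    online: bool = True
    sensors: dict[str, Sensor] = field(default_factory=dict)


def is_available(device: Device, key: str) -> bool:
    # Guarded lookup: returns False when the key is missing instead of raising KeyError.
    return (
        device.online
        and (sensor := device.sensors.get(key)) is not None
        and sensor.error is False
    )


print(is_available(Device(sensors={"temperature": Sensor()}), "temperature"))  # True
print(is_available(Device(), "temperature"))  # False, no KeyError
```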
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
"requirements": ["aioamazondevices==6.4.4"]
}
@@ -32,7 +32,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):

native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
)


@@ -40,9 +42,9 @@ SENSORS: Final = (
AmazonSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement_fn=lambda device, _key: (
native_unit_of_measurement_fn=lambda device, key: (
UnitOfTemperature.CELSIUS
if device.sensors[_key].scale == "CELSIUS"
if key in device.sensors and device.sensors[key].scale == "CELSIUS"
else UnitOfTemperature.FAHRENHEIT
),
state_class=SensorStateClass.MEASUREMENT,
@@ -18,7 +18,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call, async_update_unique_id
from .utils import (
alexa_api_call,
async_remove_dnd_from_virtual_group,
async_update_unique_id,
)

PARALLEL_UPDATES = 1

@@ -29,7 +33,9 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):

is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
)
method: str

@@ -58,6 +64,9 @@ async def async_setup_entry(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)

# Remove DND switch from virtual groups
await async_remove_dnd_from_virtual_group(hass, coordinator)

known_devices: set[str] = set()

def _check_device() -> None:
@@ -4,8 +4,10 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
@@ -61,3 +63,21 @@ async def async_update_unique_id(

# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)


async def async_remove_dnd_from_virtual_group(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
) -> None:
"""Remove entity DND from virtual group."""
entity_registry = er.async_get(hass)

for serial_num in coordinator.data:
unique_id = f"{serial_num}-do_not_disturb"
entity_id = entity_registry.async_get_entity_id(
DOMAIN, SWITCH_DOMAIN, unique_id
)
is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
if entity_id and is_group:
entity_registry.async_remove(entity_id)
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
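Both this helper and the deprecated-sensor cleanup earlier follow the same entity-registry pattern: look an entry up by unique ID and remove it if it still exists. A hedged sketch of that pattern in isolation (the function name and parameters are illustrative, not the integration's API):

```python
from homeassistant.core import HomeAssistant
import homeassistant.helpers.entity_registry as er


def remove_registry_entry(
    hass: HomeAssistant, platform: str, integration_domain: str, unique_id: str
) -> None:
    """Sketch: drop a stale registry entry identified by its unique ID, if present."""
    registry = er.async_get(hass)
    if entity_id := registry.async_get_entity_id(platform, integration_domain, unique_id):
        registry.async_remove(entity_id)
```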
@@ -7,13 +7,13 @@ from collections import namedtuple
from collections.abc import Awaitable, Callable, Coroutine
import functools
import logging
from typing import Any, cast
from typing import Any

from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
from aiohttp import ClientSession
from asusrouter import AsusRouter, AsusRouterError
from asusrouter.config import ARConfigKey
from asusrouter.modules.client import AsusClient
from asusrouter.modules.client import AsusClient, ConnectionState
from asusrouter.modules.data import AsusData
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
from asusrouter.tools.connection import get_cookie_jar
@@ -219,7 +219,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
@property
def is_connected(self) -> bool:
"""Get connected status."""
return cast(bool, self._api.is_connected)
return self._api.is_connected

async def async_connect(self) -> None:
"""Connect to the device."""
@@ -235,8 +235,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):

async def async_disconnect(self) -> None:
"""Disconnect to the device."""
if self._api is not None and self._protocol == PROTOCOL_TELNET:
self._api.connection.disconnect()
await self._api.async_disconnect()

async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
"""Get list of connected devices."""
@@ -437,6 +436,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
if dev.connection is not None
and dev.description is not None
and dev.connection.ip_address is not None
and dev.state is ConnectionState.CONNECTED
}

async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"]
"requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
}
@@ -36,11 +36,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo
raise ConfigEntryAuthFailed("Migration to OAuth required")

session = async_create_august_clientsession(hass)
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
try:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
)
except ValueError as err:
raise ConfigEntryNotReady("OAuth implementation not available") from err
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
try:
@@ -68,12 +68,17 @@ class IntegrationMatchHistory:
manufacturer_data: bool
service_data: set[str]
service_uuids: set[str]
name: str


def seen_all_fields(
previous_match: IntegrationMatchHistory, advertisement_data: AdvertisementData
previous_match: IntegrationMatchHistory,
advertisement_data: AdvertisementData,
name: str,
) -> bool:
"""Return if we have seen all fields."""
if previous_match.name != name:
return False
if not previous_match.manufacturer_data and advertisement_data.manufacturer_data:
return False
if advertisement_data.service_data and (
@@ -122,10 +127,11 @@ class IntegrationMatcher:
device = service_info.device
advertisement_data = service_info.advertisement
connectable = service_info.connectable
name = service_info.name
matched = self._matched_connectable if connectable else self._matched
matched_domains: set[str] = set()
if (previous_match := matched.get(device.address)) and seen_all_fields(
previous_match, advertisement_data
previous_match, advertisement_data, name
):
# We have seen all fields so we can skip the rest of the matchers
return matched_domains
@@ -140,11 +146,13 @@ class IntegrationMatcher:
)
previous_match.service_data |= set(advertisement_data.service_data)
previous_match.service_uuids |= set(advertisement_data.service_uuids)
previous_match.name = name
else:
matched[device.address] = IntegrationMatchHistory(
manufacturer_data=bool(advertisement_data.manufacturer_data),
service_data=set(advertisement_data.service_data),
service_uuids=set(advertisement_data.service_uuids),
name=name,
)
return matched_domains
@@ -8,7 +8,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"requirements": ["brother==5.1.0"],
"requirements": ["brother==5.1.1"],
"zeroconf": [
{
"type": "_printer._tcp.local.",
@@ -38,6 +38,10 @@ TYPE_SPECIFY_COUNTRY = "specify_country_code"

_LOGGER = logging.getLogger(__name__)

DESCRIPTION_PLACEHOLDER = {
"register_link": "https://electricitymaps.com/free-tier",
}


class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Co2signal."""
@@ -70,6 +74,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=data_schema,
description_placeholders=DESCRIPTION_PLACEHOLDER,
)

data = {CONF_API_KEY: user_input[CONF_API_KEY]}
@@ -179,4 +184,5 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
step_id=step_id,
data_schema=data_schema,
errors=errors,
description_placeholders=DESCRIPTION_PLACEHOLDER,
)
@@ -6,7 +6,7 @@
"location": "[%key:common::config_flow::data::location%]",
"api_key": "[%key:common::config_flow::data::access_token%]"
},
"description": "Visit https://electricitymaps.com/free-tier to request a token."
"description": "Visit the [Electricity Maps page]({register_link}) to request a token."
},
"coordinates": {
"data": {
@@ -166,6 +166,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
"developer_url": "https://www.coinbase.com/developer-platform",
},
errors=errors,
)
@@ -195,6 +196,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
"developer_url": "https://www.coinbase.com/developer-platform",
},
errors=errors,
)
@@ -11,7 +11,7 @@
},
"reauth_confirm": {
"title": "Update Coinbase API credentials",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit the [Developer Platform]({developer_url}) to create new credentials for {account_name}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
@@ -4,6 +4,7 @@ from __future__ import annotations

from asyncio.exceptions import TimeoutError
from collections.abc import Mapping
import re
from typing import Any

from aiocomelit import (
@@ -27,25 +28,20 @@ from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111"


pin_regex = r"^[0-9]{4,10}$"

USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
}
)

@@ -55,6 +51,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,

api: ComelitCommonApi

if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
raise InvalidPin

session = await async_client_session(hass)
if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
api = ComeliteSerialBridgeApi(
@@ -105,6 +104,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -146,6 +147,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -189,6 +192,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -210,3 +215,7 @@ class CannotConnect(HomeAssistantError):

class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""


class InvalidPin(HomeAssistantError):
"""Error to indicate an invalid pin."""
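This config-flow change drops the `cv.matches_regex` schema validators in favour of an explicit check inside `validate_input`, raising a dedicated `InvalidPin` error that the flow maps to the `invalid_pin` string. A minimal standalone sketch of that validation step, using a plain exception rather than `HomeAssistantError`:

```python
import re


class InvalidPin(Exception):
    """Raised when the PIN is not a 4-10 digit number."""


def validate_pin(pin: str) -> str:
    """Return the PIN unchanged if it matches the expected format, else raise InvalidPin."""
    if not re.fullmatch(r"[0-9]{4,10}", pin):
        raise InvalidPin
    return pin


try:
    validate_pin("12ab")
except InvalidPin:
    # The config flow would surface this as errors["base"] = "invalid_pin".
    print("invalid_pin")
```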
@@ -161,7 +161,7 @@ class ComelitSerialBridge(
entry: ComelitConfigEntry,
host: str,
port: int,
pin: int,
pin: str,
session: ClientSession,
) -> None:
"""Initialize the scanner."""
@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
entry: ComelitConfigEntry,
host: str,
port: int,
pin: int,
pin: str,
session: ClientSession,
) -> None:
"""Initialize the scanner."""
@@ -7,7 +7,14 @@ from typing import Any, cast
from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON

from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.components.cover import (
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
CoverDeviceClass,
CoverEntity,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
@@ -62,7 +69,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
super().__init__(coordinator, device, config_entry_entry_id)
# Device doesn't provide a status so we assume UNKNOWN at first startup
self._last_action: int | None = None
self._last_state: str | None = None

def _current_action(self, action: str) -> bool:
"""Return the current cover action."""
@@ -98,7 +104,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@bridge_api_call
async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state."""
self._last_state = self.state
await self.coordinator.api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state()
@@ -124,5 +129,10 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):

await super().async_added_to_hass()

if last_state := await self.async_get_last_state():
self._last_state = last_state.state
if (state := await self.async_get_last_state()) is not None:
if state.state == STATE_CLOSED:
self._last_action = STATE_COVER.index(STATE_CLOSING)
if state.state == STATE_OPEN:
self._last_action = STATE_COVER.index(STATE_OPENING)

self._attr_is_closed = state.state == STATE_CLOSED
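The cover entity now seeds both its assumed last action and `is_closed` from the stored state on startup, since the bridge reports no position of its own. A reduced sketch of restoring state in `async_added_to_hass` (a hypothetical entity that only restores `is_closed`; the real class also tracks `_last_action`):

```python
from homeassistant.components.cover import CoverEntity
from homeassistant.helpers.restore_state import RestoreEntity


class RestoredCoverSketch(CoverEntity, RestoreEntity):
    """Sketch: seed is_closed from the last stored state after a restart."""

    _attr_is_closed: bool | None = None

    async def async_added_to_hass(self) -> None:
        await super().async_added_to_hass()
        if (state := await self.async_get_last_state()) is not None:
            # No live position is available at startup, so trust the stored state.
            self._attr_is_closed = state.state == "closed"
```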
@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"requirements": ["aiocomelit==0.12.3"]
"requirements": ["aiocomelit==1.1.2"]
}
@@ -43,11 +43,13 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
@@ -514,7 +514,7 @@ class ChatLog:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None

if user_llm_hass_api is None:
if not user_llm_hass_api:
pass
elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.0"]
"requirements": ["pycync==0.4.1"]
}

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.16.0"],
"requirements": ["pydaikin==2.17.1"],
"zeroconf": ["_dkapi._tcp.local."]
}
@@ -61,5 +61,8 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="authorize",
errors=errors,
description_placeholders={"pin": self._ecobee.pin},
description_placeholders={
"pin": self._ecobee.pin,
"auth_url": "https://www.ecobee.com/consumerportal/index.html",
},
)

@@ -8,7 +8,7 @@
}
},
"authorize": {
"description": "Please authorize this app at https://www.ecobee.com/consumerportal/index.html with PIN code:\n\n{pin}\n\nThen, select **Submit**."
"description": "Please authorize this app at {auth_url} with PIN code:\n\n{pin}\n\nThen, select **Submit**."
}
},
"error": {
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"requirements": ["env-canada==0.11.2"]
"requirements": ["env-canada==0.11.3"]
}

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251001.0"]
"requirements": ["home-assistant-frontend==20251001.4"]
}
@@ -76,10 +76,6 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
) -> bool:
"""Unload a config entry."""
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

conversation.async_unset_agent(hass, entry)

return True

@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.event import async_call_later

@@ -68,7 +68,13 @@ async def async_send_text_commands(
) -> list[CommandResponse]:
"""Send text commands to Google Assistant Service."""
# There can only be 1 entry (config_flow has single_instance_allowed)
entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]
entries = hass.config_entries.async_loaded_entries(DOMAIN)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
)
entry: GoogleAssistantSDKConfigEntry = entries[0]

session = entry.runtime_data.session
try:
@@ -1,4 +1,4 @@
"""Support for Google Assistant SDK."""
"""Services for the Google Assistant SDK integration."""

from __future__ import annotations

@@ -65,6 +65,9 @@
}
},
"exceptions": {
"entry_not_loaded": {
"message": "Entry not loaded"
},
"grpc_error": {
"message": "Failed to communicate with Google Assistant"
}
@@ -456,6 +456,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
"""Initialize the agent."""
self.entry = entry
self.subentry = subentry
self.default_model = default_model
self._attr_name = subentry.title
self._genai_client = entry.runtime_data
self._attr_unique_id = subentry.subentry_id
@@ -489,7 +490,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))

model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_name = options.get(CONF_CHAT_MODEL, self.default_model)
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemma" not in model_name
@@ -620,6 +621,13 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
def create_generate_content_config(self) -> GenerateContentConfig:
"""Create the GenerateContentConfig for the LLM."""
options = self.subentry.data
model = options.get(CONF_CHAT_MODEL, self.default_model)
thinking_config: ThinkingConfig | None = None
if model.startswith("models/gemini-2.5") and not model.endswith(
("tts", "image", "image-preview")
):
thinking_config = ThinkingConfig(include_thoughts=True)

return GenerateContentConfig(
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
@@ -652,7 +660,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
),
),
],
thinking_config=ThinkingConfig(include_thoughts=True),
thinking_config=thinking_config,
)
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.3.3b0"],
"requirements": ["aiohasupervisor==0.3.3"],
"single_config_entry": true
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.81", "babel==2.15.0"]
"requirements": ["holidays==0.82", "babel==2.15.0"]
}
@@ -67,11 +67,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""

context: ConfigFlowContext

# `rts_dtr` targets older adapters, `baudrate` works for newer ones. The reason we
# try them in this order is that on older adapters `baudrate` entered the ESP32-S3
# bootloader instead of the MG24 bootloader.
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]

async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None

@@ -157,7 +157,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Connect ZBT-2 firmware update entity."""

bootloader_reset_methods = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
bootloader_reset_methods = [ResetTarget.RTS_DTR]

def __init__(
self,
@@ -1,15 +1,20 @@
"""Home Assistant Hardware integration helpers."""

from __future__ import annotations

from collections import defaultdict
from collections.abc import AsyncIterator, Awaitable, Callable
from contextlib import asynccontextmanager
import logging
from typing import Protocol
from typing import TYPE_CHECKING, Protocol

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback

from . import DATA_COMPONENT
from .util import FirmwareInfo

if TYPE_CHECKING:
from .util import FirmwareInfo

_LOGGER = logging.getLogger(__name__)

@@ -51,6 +56,7 @@ class HardwareInfoDispatcher:
self._notification_callbacks: defaultdict[
str, set[Callable[[FirmwareInfo], None]]
] = defaultdict(set)
self._active_firmware_updates: dict[str, str] = {}

def register_firmware_info_provider(
self, domain: str, platform: HardwareFirmwareInfoModule
@@ -118,6 +124,36 @@ class HardwareInfoDispatcher:
if fw_info is not None:
yield fw_info

def register_firmware_update_in_progress(
self, device: str, source_domain: str
) -> None:
"""Register that a firmware update is in progress for a device."""
if device in self._active_firmware_updates:
current_domain = self._active_firmware_updates[device]
raise ValueError(
f"Firmware update already in progress for {device} by {current_domain}"
)
self._active_firmware_updates[device] = source_domain

def unregister_firmware_update_in_progress(
self, device: str, source_domain: str
) -> None:
"""Unregister a firmware update for a device."""
if device not in self._active_firmware_updates:
raise ValueError(f"No firmware update in progress for {device}")

if self._active_firmware_updates[device] != source_domain:
current_domain = self._active_firmware_updates[device]
raise ValueError(
f"Firmware update for {device} is owned by {current_domain}, not {source_domain}"
)

del self._active_firmware_updates[device]

def is_firmware_update_in_progress(self, device: str) -> bool:
"""Check if a firmware update is in progress for a device."""
return device in self._active_firmware_updates


@hass_callback
def async_register_firmware_info_provider(
@@ -141,3 +177,42 @@ def async_notify_firmware_info(
) -> Awaitable[None]:
"""Notify the dispatcher of new firmware information."""
return hass.data[DATA_COMPONENT].notify_firmware_info(domain, firmware_info)


@hass_callback
def async_register_firmware_update_in_progress(
hass: HomeAssistant, device: str, source_domain: str
) -> None:
"""Register that a firmware update is in progress for a device."""
return hass.data[DATA_COMPONENT].register_firmware_update_in_progress(
device, source_domain
)


@hass_callback
def async_unregister_firmware_update_in_progress(
hass: HomeAssistant, device: str, source_domain: str
) -> None:
"""Unregister a firmware update for a device."""
return hass.data[DATA_COMPONENT].unregister_firmware_update_in_progress(
device, source_domain
)


@hass_callback
def async_is_firmware_update_in_progress(hass: HomeAssistant, device: str) -> bool:
"""Check if a firmware update is in progress for a device."""
return hass.data[DATA_COMPONENT].is_firmware_update_in_progress(device)


@asynccontextmanager
async def async_firmware_update_context(
hass: HomeAssistant, device: str, source_domain: str
) -> AsyncIterator[None]:
"""Register a device as having its firmware being actively updated."""
async_register_firmware_update_in_progress(hass, device, source_domain)

try:
yield
finally:
async_unregister_firmware_update_in_progress(hass, device, source_domain)
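The new helpers record which domain currently owns an in-progress firmware update and wrap registration and cleanup in an async context manager. A small usage sketch of the same idea, reduced to a plain module-level registry so it runs standalone; the names and the device path are illustrative, not the module's API:

```python
import asyncio
from contextlib import asynccontextmanager

_active_updates: dict[str, str] = {}  # device -> owning domain


def register(device: str, domain: str) -> None:
    if device in _active_updates:
        raise ValueError(f"Firmware update already in progress for {device}")
    _active_updates[device] = domain


def unregister(device: str, domain: str) -> None:
    if _active_updates.get(device) != domain:
        raise ValueError(f"Update for {device} is not owned by {domain}")
    del _active_updates[device]


@asynccontextmanager
async def firmware_update_context(device: str, domain: str):
    """Mark the device busy for the duration of the update, even if flashing fails."""
    register(device, domain)
    try:
        yield
    finally:
        unregister(device, domain)


async def main() -> None:
    async with firmware_update_context("/dev/ttyUSB0", "homeassistant_hardware"):
        print("/dev/ttyUSB0" in _active_updates)  # True while flashing
    print("/dev/ttyUSB0" in _active_updates)      # False afterwards


asyncio.run(main())
```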
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.0.34",
"universal-silabs-flasher==0.0.35",
"ha-silabs-firmware-client==0.2.0"
]
}
@@ -67,7 +67,7 @@
}
},
"abort": {
"not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.",
"not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please manually set up OpenThread Border Router to communicate with it.",
"otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again.",
"zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.",
"otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.",
@@ -275,6 +275,7 @@ class BaseFirmwareUpdateEntity(
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.bootloader_reset_methods,
progress_callback=self._update_progress,
domain=self._config_entry.domain,
)
finally:
self._attr_in_progress = False
@@ -26,6 +26,7 @@ from homeassistant.helpers.singleton import singleton

from . import DATA_COMPONENT
from .const import (
DOMAIN,
OTBR_ADDON_MANAGER_DATA,
OTBR_ADDON_NAME,
OTBR_ADDON_SLUG,
@@ -33,6 +34,7 @@ from .const import (
ZIGBEE_FLASHER_ADDON_NAME,
ZIGBEE_FLASHER_ADDON_SLUG,
)
from .helpers import async_firmware_update_context
from .silabs_multiprotocol_addon import (
WaitingAddonManager,
get_multiprotocol_addon_manager,
@@ -359,45 +361,50 @@ async def async_flash_silabs_firmware(
expected_installed_firmware_type: ApplicationType,
bootloader_reset_methods: Sequence[ResetTarget] = (),
progress_callback: Callable[[int, int], None] | None = None,
*,
domain: str = DOMAIN,
) -> FirmwareInfo:
"""Flash firmware to the SiLabs device."""
firmware_info = await guess_firmware_info(hass, device)
_LOGGER.debug("Identified firmware info: %s", firmware_info)
async with async_firmware_update_context(hass, device, domain):
firmware_info = await guess_firmware_info(hass, device)
_LOGGER.debug("Identified firmware info: %s", firmware_info)

fw_image = await hass.async_add_executor_job(parse_firmware_image, fw_data)
fw_image = await hass.async_add_executor_job(parse_firmware_image, fw_data)

flasher = Flasher(
device=device,
probe_methods=(
ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(),
ApplicationType.EZSP.as_flasher_application_type(),
ApplicationType.SPINEL.as_flasher_application_type(),
ApplicationType.CPC.as_flasher_application_type(),
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
)

async with AsyncExitStack() as stack:
for owner in firmware_info.owners:
await stack.enter_async_context(owner.temporarily_stop(hass))

try:
# Enter the bootloader with indeterminate progress
await flasher.enter_bootloader()

# Flash the firmware, with progress
await flasher.flash_firmware(fw_image, progress_callback=progress_callback)
except Exception as err:
raise HomeAssistantError("Failed to flash firmware") from err

probed_firmware_info = await probe_silabs_firmware_info(
device,
probe_methods=(expected_installed_firmware_type,),
flasher = Flasher(
device=device,
probe_methods=(
ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(),
ApplicationType.EZSP.as_flasher_application_type(),
ApplicationType.SPINEL.as_flasher_application_type(),
ApplicationType.CPC.as_flasher_application_type(),
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
)

if probed_firmware_info is None:
raise HomeAssistantError("Failed to probe the firmware after flashing")
async with AsyncExitStack() as stack:
for owner in firmware_info.owners:
await stack.enter_async_context(owner.temporarily_stop(hass))

return probed_firmware_info
try:
# Enter the bootloader with indeterminate progress
await flasher.enter_bootloader()

# Flash the firmware, with progress
await flasher.flash_firmware(
fw_image, progress_callback=progress_callback
)
except Exception as err:
raise HomeAssistantError("Failed to flash firmware") from err

probed_firmware_info = await probe_silabs_firmware_info(
device,
probe_methods=(expected_installed_firmware_type,),
)

if probed_firmware_info is None:
raise HomeAssistantError("Failed to probe the firmware after flashing")

return probed_firmware_info
@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.18"],
"requirements": ["aiohomekit==3.2.19"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}
@@ -158,7 +158,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_kwh,
value_fn=lambda data: data.measurement.energy_import_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t1_kwh",
@@ -172,7 +172,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
data.measurement.energy_import_t1_kwh is not None
and data.measurement.energy_export_t2_kwh is not None
),
value_fn=lambda data: data.measurement.energy_import_t1_kwh,
value_fn=lambda data: data.measurement.energy_import_t1_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t2_kwh",
@@ -182,7 +182,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t2_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t2_kwh,
value_fn=lambda data: data.measurement.energy_import_t2_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t3_kwh",
@@ -192,7 +192,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t3_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t3_kwh,
value_fn=lambda data: data.measurement.energy_import_t3_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_import_t4_kwh",
@@ -202,7 +202,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_import_t4_kwh is not None,
value_fn=lambda data: data.measurement.energy_import_t4_kwh,
value_fn=lambda data: data.measurement.energy_import_t4_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_export_kwh",
@@ -212,7 +212,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_kwh,
value_fn=lambda data: data.measurement.energy_export_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t1_kwh",
@@ -227,7 +227,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
and data.measurement.energy_export_t2_kwh is not None
),
enabled_fn=lambda data: data.measurement.energy_export_t1_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t1_kwh,
value_fn=lambda data: data.measurement.energy_export_t1_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t2_kwh",
@@ -238,7 +238,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t2_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t2_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t2_kwh,
value_fn=lambda data: data.measurement.energy_export_t2_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t3_kwh",
@@ -249,7 +249,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t3_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t3_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t3_kwh,
value_fn=lambda data: data.measurement.energy_export_t3_kwh or None,
),
HomeWizardSensorEntityDescription(
key="total_power_export_t4_kwh",
@@ -260,7 +260,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
state_class=SensorStateClass.TOTAL_INCREASING,
has_fn=lambda data: data.measurement.energy_export_t4_kwh is not None,
enabled_fn=lambda data: data.measurement.energy_export_t4_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t4_kwh,
value_fn=lambda data: data.measurement.energy_export_t4_kwh or None,
),
HomeWizardSensorEntityDescription(
key="active_power_w",
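Every `value_fn` above now coerces the reading with `x or None`, which appears intended to turn a 0 kWh reading (as reported for unused tariffs) into an unknown value rather than a literal zero total. The coercion in isolation:

```python
def as_total(value: float | None) -> float | None:
    """Mirror the `or None` coercion: missing or zero readings become None."""
    return value or None


print(as_total(123.4))  # 123.4
print(as_total(0.0))    # None
print(as_total(None))   # None
```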
@@ -112,6 +112,9 @@ class HuaweiLteConfigFlow(ConfigFlow, domain=DOMAIN):
}
),
errors=errors or {},
description_placeholders={
"sample_ip": "http://192.168.X.1",
},
)

async def _async_show_reauth_form(
@@ -132,6 +135,9 @@ class HuaweiLteConfigFlow(ConfigFlow, domain=DOMAIN):
}
),
errors=errors or {},
description_placeholders={
"sample_ip": "http://192.168.X.1",
},
)

async def _connect(
@@ -406,4 +412,10 @@ class HuaweiLteOptionsFlow(OptionsFlow):
): bool,
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
return self.async_show_form(
step_id="init",
data_schema=data_schema,
description_placeholders={
"sample_ip": "http://192.168.X.1",
},
)
@@ -41,7 +41,7 @@
},
"data_description": {
"password": "Password for accessing the router's API. Typically, the same as the one used for the router's web interface.",
"url": "Base URL to the API of the router. Typically, something like `http://192.168.X.1`. This is the beginning of the location shown in a browser when accessing the router's web interface.",
"url": "Base URL to the API of the router. Typically, something like `{sample_ip}`. This is the beginning of the location shown in a browser when accessing the router's web interface.",
"username": "Username for accessing the router's API. Typically, the same as the one used for the router's web interface. Usually, either `admin`, or left empty (recommended if that works).",
"verify_ssl": "Whether to verify the SSL certificate of the router when accessing it. Applicable only if the router is accessed via HTTPS."
},
@@ -68,6 +68,7 @@
"initial_press": "\"{subtype}\" pressed initially",
"repeat": "\"{subtype}\" held down",
"short_release": "\"{subtype}\" released after short press",
"long_press": "\"{subtype}\" long pressed",
"long_release": "[%key:component::hue::device_automation::trigger_type::remote_button_long_release%]",
"double_short_release": "[%key:component::hue::device_automation::trigger_type::remote_double_button_short_press%]",
"start": "[%key:component::hue::device_automation::trigger_type::initial_press%]"
@@ -8,13 +8,16 @@ from idasen_ha import Desk

from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator]

UPDATE_DEBOUNCE_TIME = 0.2


class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
"""Class to manage updates for the Idasen Desk."""
@@ -33,9 +36,22 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
hass, _LOGGER, config_entry=config_entry, name=config_entry.title
)
self.address = address
self._expected_connected = False
self.desk = Desk(self._async_handle_update)

self.desk = Desk(self.async_set_updated_data)
self._expected_connected = False
self._height: int | None = None

@callback
def async_update_data() -> None:
self.async_set_updated_data(self._height)

self._debouncer = Debouncer(
hass=self.hass,
logger=_LOGGER,
cooldown=UPDATE_DEBOUNCE_TIME,
immediate=True,
function=async_update_data,
)

async def async_connect(self) -> bool:
"""Connect to desk."""
@@ -60,3 +76,9 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
"""Ensure that the desk is connected if that is the expected state."""
if self._expected_connected:
await self.async_connect()

@callback
def _async_handle_update(self, height: int | None) -> None:
"""Handle an update from the desk."""
self._height = height
self._debouncer.async_schedule_call()
@@ -13,7 +13,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN
from .const import API_ACCESS_URL, DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -57,5 +57,8 @@ class IgloohomeConfigFlow(ConfigFlow, domain=DOMAIN):
)

return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
description_placeholders={"api_access_url": API_ACCESS_URL},
)
@@ -1,3 +1,4 @@
"""Constants for the igloohome integration."""

DOMAIN = "igloohome"
API_ACCESS_URL = "https://access.igloocompany.co/api-access"
@@ -2,7 +2,7 @@
"config": {
"step": {
"user": {
"description": "Copy & paste your [API access credentials](https://access.igloocompany.co/api-access) to give Home Assistant access to your account.",
"description": "Copy & paste your [API access credentials]({api_access_url}) to give Home Assistant access to your account.",
"data": {
"client_id": "Client ID",
"client_secret": "Client secret"
@@ -1,12 +1,12 @@
{
"domain": "iometer",
"name": "IOmeter",
"codeowners": ["@MaestroOnICe"],
"codeowners": ["@jukrebs"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/iometer",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["iometer==0.1.0"],
"requirements": ["iometer==0.2.0"],
"zeroconf": ["_iometer._tcp.local."]
}
@@ -177,6 +177,9 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="user",
data_schema=_data_schema(self.discovered_conf),
errors=errors,
description_placeholders={
"sample_ip": "http://192.168.10.100:80",
},
)

async def _async_set_unique_id_or_update(
@@ -302,7 +305,10 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN):
CONF_HOST: existing_data[CONF_HOST],
}
return self.async_show_form(
description_placeholders={CONF_HOST: existing_data[CONF_HOST]},
description_placeholders={
CONF_HOST: existing_data[CONF_HOST],
"sample_ip": "http://192.168.10.100:80",
},
step_id="reauth_confirm",
data_schema=vol.Schema(
{
@@ -347,7 +353,13 @@ class OptionsFlowHandler(OptionsFlowWithReload):
}
)

return self.async_show_form(step_id="init", data_schema=options_schema)
return self.async_show_form(
step_id="init",
data_schema=options_schema,
description_placeholders={
"sample_ip": "http://192.168.10.100:80",
},
)


class InvalidHost(HomeAssistantError):
@@ -9,7 +9,7 @@
"password": "[%key:common::config_flow::data::password%]",
"tls": "The TLS version of the ISY controller."
},
"description": "The host entry must be in full URL format, e.g., http://192.168.10.100:80",
"description": "The host entry must be in full URL format, e.g., {sample_ip}",
"title": "Connect to your ISY"
},
"reauth_confirm": {
@@ -26,7 +26,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_host": "The host entry was not in full URL format, e.g., http://192.168.10.100:80"
"invalid_host": "The host entry was not in full URL format, e.g., {sample_ip}"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.1"]
"requirements": ["pylamarzocco==2.1.2"]
}
@@ -134,4 +134,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
data=user_input,
)

return self.show_user_form(user_input, errors)
return self.show_user_form(
user_input,
errors,
description_placeholders={"example_url": "https://mastodon.social"},
)
@@ -9,7 +9,7 @@
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"data_description": {
"base_url": "The URL of your Mastodon instance e.g. https://mastodon.social.",
"base_url": "The URL of your Mastodon instance e.g. {example_url}.",
"client_id": "The client key for the application created within your Mastodon account.",
"client_secret": "The client secret for the application created within your Mastodon account.",
"access_token": "The access token for the application created within your Mastodon account."
@@ -26,6 +26,8 @@ REAUTH_SCHEMA = vol.Schema(
}
)

EXAMPLE_URL = "http://192.168.1.123:1234"


class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
"""Mealie config flow."""
@@ -84,6 +86,7 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="user",
data_schema=USER_SCHEMA,
errors=errors,
description_placeholders={"example_url": EXAMPLE_URL},
)

async def async_step_reauth(
@@ -114,6 +117,7 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="reauth_confirm",
data_schema=REAUTH_SCHEMA,
errors=errors,
description_placeholders={"example_url": EXAMPLE_URL},
)

async def async_step_reconfigure(
@@ -142,4 +146,5 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="reconfigure",
data_schema=USER_SCHEMA,
errors=errors,
description_placeholders={"example_url": EXAMPLE_URL},
)
@@ -1,6 +1,6 @@
{
"common": {
"data_description_host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234",
"data_description_host": "The URL of your Mealie instance, for example, {example_url}.",
"data_description_api_token": "The API token of your Mealie instance from your user profile within Mealie.",
"data_description_verify_ssl": "Should SSL certificates be verified? This should be off for self-signed certificates."
},
@@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.translation import async_get_cached_translations

from .const import MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX

@@ -62,12 +63,15 @@ class MediaSourceItem:
async def async_browse(self) -> BrowseMediaSource:
"""Browse this item."""
if self.domain is None:
title = async_get_cached_translations(
self.hass, self.hass.config.language, "common", "media_source"
).get("component.media_source.common.sources_default", "Media Sources")
base = BrowseMediaSource(
domain=None,
identifier=None,
media_class=MediaClass.APP,
media_content_type=MediaType.APPS,
title="Media Sources",
title=title,
can_play=False,
can_expand=True,
children_media_class=MediaClass.APP,
@@ -9,5 +9,8 @@
"unknown_media_source": {
"message": "Unknown media source: {domain}"
}
},
"common": {
"sources_default": "Media sources"
}
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/melcloud",
"iot_class": "cloud_polling",
"loggers": ["pymelcloud"],
"requirements": ["python-melcloud==0.1.0"]
"requirements": ["python-melcloud==0.1.2"]
}
@@ -54,6 +54,7 @@ _LOGGER = logging.getLogger(__name__)
DEFAULT_PLATE_COUNT = 4

PLATE_COUNT = {
"KM7575": 6,
"KM7678": 6,
"KM7697": 6,
"KM7878": 6,
@@ -208,7 +208,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):

def __process_raw_value(self, entry: float | str | bytes) -> str | None:
"""Process value from sensor with NaN handling, scaling, offset, min/max etc."""
if self._nan_value and entry in (self._nan_value, -self._nan_value):
if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
return None
if isinstance(entry, bytes):
return entry.decode()
@@ -253,6 +253,7 @@ class ModbusHub:
self._client: (
AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
) = None
self._lock = asyncio.Lock()
self.event_connected = asyncio.Event()
self.hass = hass
self.name = client_config[CONF_NAME]
@@ -415,7 +416,9 @@ class ModbusHub:
"""Convert async to sync pymodbus call."""
if not self._client:
return None
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
await asyncio.sleep(self._msg_wait)
return result
async with self._lock:
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
# small delay until next request/response
await asyncio.sleep(self._msg_wait)
return result
@@ -188,7 +188,10 @@ class MqttLock(MqttEntity, LockEntity):
return
if payload == self._config[CONF_PAYLOAD_RESET]:
# Reset the state to `unknown`
self._attr_is_locked = None
self._attr_is_locked = self._attr_is_locking = None
self._attr_is_unlocking = None
self._attr_is_open = self._attr_is_opening = None
self._attr_is_jammed = None
elif payload in self._valid_states:
self._attr_is_locked = payload == self._config[CONF_STATE_LOCKED]
self._attr_is_locking = payload == self._config[CONF_STATE_LOCKING]
@@ -73,6 +73,13 @@ STEP_MODBUS_DATA_SCHEMA = vol.Schema(
)


STEP_MODBUS_PLACEHOLDERS = {
"tcp": "tcp://[HOST]:[PORT]",
"serial": "serial://[LOCAL DEVICE]",
"rfc2217": "rfc2217://[HOST]:[PORT]",
}


class FieldError(Exception):
"""Field with invalid data."""

@@ -183,7 +190,9 @@ class NibeHeatPumpConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the modbus step."""
if user_input is None:
return self.async_show_form(
step_id="modbus", data_schema=STEP_MODBUS_DATA_SCHEMA
step_id="modbus",
data_schema=STEP_MODBUS_DATA_SCHEMA,
description_placeholders=STEP_MODBUS_PLACEHOLDERS,
)

errors = {}
@@ -200,7 +209,10 @@ class NibeHeatPumpConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title=title, data=data)

return self.async_show_form(
step_id="modbus", data_schema=STEP_MODBUS_DATA_SCHEMA, errors=errors
step_id="modbus",
data_schema=STEP_MODBUS_DATA_SCHEMA,
errors=errors,
description_placeholders=STEP_MODBUS_PLACEHOLDERS,
)

async def async_step_nibegw(
@@ -15,7 +15,7 @@
"modbus_unit": "Modbus unit identifier"
},
"data_description": {
"modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote Telnet-based Modbus RTU connection.",
"modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `{tcp}` for Modbus TCP connection\n - `{serial}` for a local Modbus RTU connection\n - `{rfc2217}` for a remote Telnet-based Modbus RTU connection.",
"modbus_unit": "Unit identification for your heat pump. Can usually be left at 0."
}
},
@@ -34,6 +34,7 @@ async def async_setup_entry(

coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
await coordinator.fetch_data(dt_util.utcnow(), True)
await coordinator.update_listeners(dt_util.utcnow())
if not coordinator.last_update_success:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
@@ -44,9 +44,10 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
name=DOMAIN,
)
self.client = NordPoolClient(session=async_get_clientsession(hass))
self.unsub: Callable[[], None] | None = None
self.data_unsub: Callable[[], None] | None = None
self.listener_unsub: Callable[[], None] | None = None

def get_next_interval(self, now: datetime) -> datetime:
def get_next_data_interval(self, now: datetime) -> datetime:
"""Compute next time an update should occur."""
next_hour = dt_util.utcnow() + timedelta(hours=1)
next_run = datetime(
@@ -56,23 +57,45 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
next_hour.hour,
tzinfo=dt_util.UTC,
)
LOGGER.debug("Next update at %s", next_run)
LOGGER.debug("Next data update at %s", next_run)
return next_run

def get_next_15_interval(self, now: datetime) -> datetime:
"""Compute next time we need to notify listeners."""
next_run = dt_util.utcnow() + timedelta(minutes=15)
next_minute = next_run.minute // 15 * 15
next_run = next_run.replace(
minute=next_minute, second=0, microsecond=0, tzinfo=dt_util.UTC
)

LOGGER.debug("Next listener update at %s", next_run)
return next_run

async def async_shutdown(self) -> None:
"""Cancel any scheduled call, and ignore new runs."""
await super().async_shutdown()
if self.unsub:
self.unsub()
self.unsub = None
if self.data_unsub:
self.data_unsub()
self.data_unsub = None
if self.listener_unsub:
self.listener_unsub()
self.listener_unsub = None

async def update_listeners(self, now: datetime) -> None:
"""Update entity listeners."""
self.listener_unsub = async_track_point_in_utc_time(
self.hass,
self.update_listeners,
self.get_next_15_interval(dt_util.utcnow()),
)
self.async_update_listeners()

async def fetch_data(self, now: datetime, initial: bool = False) -> None:
"""Fetch data from Nord Pool."""
self.unsub = async_track_point_in_utc_time(
self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
self.data_unsub = async_track_point_in_utc_time(
self.hass, self.fetch_data, self.get_next_data_interval(dt_util.utcnow())
)
if self.config_entry.pref_disable_polling and not initial:
self.async_update_listeners()
return
try:
data = await self.handle_data(initial)
@@ -157,7 +157,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
) from error
except NordPoolEmptyResponseError:
return {area: [] for area in areas}
except NordPoolError as error:
except (NordPoolError, TimeoutError) as error:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="connection_error",
@@ -307,7 +307,7 @@
},
"markdown": {
"name": "Format as Markdown",
"description": "Enable Markdown formatting for the message body (Web app only). See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
"description": "Enable Markdown formatting for the message body. See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
},
"tags": {
"name": "Tags/Emojis",
@@ -89,7 +89,10 @@ class NuHeatConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title=info["title"], data=user_input)

return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
step_id="user",
data_schema=DATA_SCHEMA,
errors=errors,
description_placeholders={"nuheat_url": "https://MyNuHeat.com"},
)
@@ -12,7 +12,7 @@
"step": {
"user": {
"title": "Connect to the NuHeat",
"description": "You will need to obtain your thermostat\u2019s numeric serial number or ID by logging into https://MyNuHeat.com and selecting your thermostat(s).",
"description": "You will need to obtain your thermostat\u2019s numeric serial number or ID by logging into {nuheat_url} and selecting your thermostat(s).",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
@@ -35,7 +35,7 @@ from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry

_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024  # 5.2MB
UPLOAD_CHUNK_SIZE = 32 * 320 * 1024  # 10.4MB
TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300
@@ -163,7 +163,10 @@ class OneDriveBackupAgent(BackupAgent):
)
try:
backup_file = await LargeFileUploadClient.upload(
self._token_function, file, session=async_get_clientsession(self._hass)
self._token_function,
file,
upload_chunk_size=UPLOAD_CHUNK_SIZE,
session=async_get_clientsession(self._hass),
)
except HashMismatchError as err:
raise BackupAgentError(
@@ -68,7 +68,7 @@ class OpenUvCoordinator(DataUpdateCoordinator[dict[str, Any]]):


class OpenUvProtectionWindowCoordinator(OpenUvCoordinator):
"""Define an OpenUV data coordinator for the protetction window."""
"""Define an OpenUV data coordinator for the protection window."""

_reprocess_listener: CALLBACK_TYPE | None = None

@@ -76,10 +76,18 @@ class OpenUvProtectionWindowCoordinator(OpenUvCoordinator):
data = await super()._async_update_data()

for key in ("from_time", "to_time", "from_uv", "to_uv"):
if not data.get(key):
msg = "Skipping update due to missing data: {key}"
# a key missing from the data is an error.
if key not in data:
msg = f"Update failed due to missing data: {key}"
raise UpdateFailed(msg)

# check for null or zero value in the data & skip further processing
# of this update if one is found. this is a normal condition
# indicating that there is no protection window.
if not data[key]:
LOGGER.warning("Skipping update due to missing data: %s", key)
return {}

data = self._parse_data(data)
data = self._process_data(data)
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.15.5"]
"requirements": ["opower==0.15.7"]
}
@@ -75,6 +75,9 @@ async def _title(hass: HomeAssistant, discovery_info: HassioServiceInfo) -> str:
if device and ("Connect_ZBT-1" in device or "SkyConnect" in device):
return f"Home Assistant Connect ZBT-1 ({discovery_info.name})"

if device and "Nabu_Casa_ZBT-2" in device:
return f"Home Assistant Connect ZBT-2 ({discovery_info.name})"

return discovery_info.name
@@ -210,7 +210,9 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the local authentication step via config flow."""
errors = {}
description_placeholders = {}
description_placeholders = {
"somfy-developer-mode-docs": "https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started"
}

if user_input:
self._host = user_input[CONF_HOST]
@@ -120,7 +120,7 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
icon="mdi:water",
native_unit_of_measurement=UnitOfVolume.LITERS,
device_class=SensorDeviceClass.WATER,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
),
OverkizSensorDescription(
key=OverkizState.IO_OUTLET_ENGINE,
@@ -32,7 +32,7 @@
}
},
"local": {
"description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway.",
"description": "By activating the [Developer Mode of your TaHoma box]({somfy-developer-mode-docs}), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway.",
"data": {
"host": "[%key:common::config_flow::data::host%]",
"token": "[%key:common::config_flow::data::api_token%]",
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["ovoenergy"],
"requirements": ["ovoenergy==2.0.1"]
"requirements": ["ovoenergy==3.0.1"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/portainer",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==0.1.7"]
"requirements": ["pyportainer==1.0.3"]
}
@@ -15,5 +15,5 @@
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["pyprobeplus==1.0.1"]
"requirements": ["pyprobeplus==1.1.0"]
}
@@ -215,6 +215,7 @@ def create_coordinator_container_vm(
return DataUpdateCoordinator(
hass,
_LOGGER,
config_entry=None,
name=f"proxmox_coordinator_{host_name}_{node_name}_{vm_id}",
update_method=async_update_data,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
@@ -16,7 +16,6 @@ ATTR_HTML: Final = "html"
ATTR_CALLBACK_URL: Final = "callback_url"
ATTR_EXPIRE: Final = "expire"
ATTR_TTL: Final = "ttl"
ATTR_DATA: Final = "data"
ATTR_TIMESTAMP: Final = "timestamp"

CONF_USER_KEY: Final = "user_key"
@@ -67,7 +67,7 @@ class PushoverNotificationService(BaseNotificationService):

# Extract params from data dict
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA, {})
data = kwargs.get(ATTR_DATA) or {}
url = data.get(ATTR_URL)
url_title = data.get(ATTR_URL_TITLE)
priority = data.get(ATTR_PRIORITY)
@@ -88,7 +88,7 @@ class PushsaferNotificationService(BaseNotificationService):
_LOGGER.debug("%s target(s) specified", len(targets))

title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA, {})
data = kwargs.get(ATTR_DATA) or {}

# Converting the specified image to base64
picture1 = data.get(ATTR_PICTURE1)
@@ -72,6 +72,7 @@ REAUTH_SCHEMA = vol.Schema(
),
}
)
PLACEHOLDER = {"example_url": "https://example.com:8000/path"}


async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> None:
@@ -134,6 +135,7 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN):
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
description_placeholders=PLACEHOLDER,
)

async def async_step_reauth(
@@ -211,7 +213,10 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN):
STEP_USER_DATA_SCHEMA,
suggested_values,
),
description_placeholders={CONF_NAME: reconfig_entry.data[CONF_USERNAME]},
description_placeholders={
CONF_NAME: reconfig_entry.data[CONF_USERNAME],
**PLACEHOLDER,
},
errors=errors,
)
@@ -9,7 +9,7 @@
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"url": "Specify the full URL of your pyLoad web interface, including the protocol (HTTP or HTTPS), hostname or IP address, port (pyLoad uses 8000 by default), and any path prefix if applicable.\nExample: `https://example.com:8000/path`",
"url": "Specify the full URL of your pyLoad web interface, including the protocol (HTTP or HTTPS), hostname or IP address, port (pyLoad uses 8000 by default), and any path prefix if applicable.\nExample: `{example_url}`",
"username": "The username used to access the pyLoad instance.",
"password": "The password associated with the pyLoad account.",
"verify_ssl": "If checked, the SSL certificate will be validated to ensure a secure connection."
@@ -39,6 +39,23 @@ from .renault_vehicle import COORDINATORS, RenaultVehicleProxy
LOGGER = logging.getLogger(__name__)


async def _get_filtered_vehicles(account: RenaultAccount) -> list[KamereonVehiclesLink]:
"""Filter out vehicles with missing details.

May be due to new purchases, or issue with the Renault servers.
"""
vehicles = await account.get_vehicles()
if not vehicles.vehicleLinks:
return []
result: list[KamereonVehiclesLink] = []
for link in vehicles.vehicleLinks:
if link.vehicleDetails is None:
LOGGER.warning("Ignoring vehicle with missing details: %s", link.vin)
continue
result.append(link)
return result


class RenaultHub:
"""Handle account communication with Renault servers."""

@@ -84,49 +101,48 @@ class RenaultHub:
account_id: str = config_entry.data[CONF_KAMEREON_ACCOUNT_ID]

self._account = await self._client.get_api_account(account_id)
vehicles = await self._account.get_vehicles()
if vehicles.vehicleLinks:
if any(
vehicle_link.vehicleDetails is None
for vehicle_link in vehicles.vehicleLinks
):
raise ConfigEntryNotReady(
"Failed to retrieve vehicle details from Renault servers"
)

num_call_per_scan = len(COORDINATORS) * len(vehicles.vehicleLinks)
scan_interval = timedelta(
seconds=(3600 * num_call_per_scan) / MAX_CALLS_PER_HOURS
vehicle_links = await _get_filtered_vehicles(self._account)
if not vehicle_links:
LOGGER.debug(
"No valid vehicle details found for account_id: %s", account_id
)
raise ConfigEntryNotReady(
"Failed to retrieve vehicle details from Renault servers"
)

device_registry = dr.async_get(self._hass)
await asyncio.gather(
*(
self.async_initialise_vehicle(
vehicle_link,
self._account,
scan_interval,
config_entry,
device_registry,
)
for vehicle_link in vehicles.vehicleLinks
)
)
num_call_per_scan = len(COORDINATORS) * len(vehicle_links)
scan_interval = timedelta(
seconds=(3600 * num_call_per_scan) / MAX_CALLS_PER_HOURS
)

# all vehicles have been initiated with the right number of active coordinators
num_call_per_scan = 0
for vehicle_link in vehicles.vehicleLinks:
device_registry = dr.async_get(self._hass)
await asyncio.gather(
*(
self.async_initialise_vehicle(
vehicle_link,
self._account,
scan_interval,
config_entry,
device_registry,
)
for vehicle_link in vehicle_links
)
)

# all vehicles have been initiated with the right number of active coordinators
num_call_per_scan = 0
for vehicle_link in vehicle_links:
vehicle = self._vehicles[str(vehicle_link.vin)]
num_call_per_scan += len(vehicle.coordinators)

new_scan_interval = timedelta(
seconds=(3600 * num_call_per_scan) / MAX_CALLS_PER_HOURS
)
if new_scan_interval != scan_interval:
# we need to change the vehicles with the right scan interval
for vehicle_link in vehicle_links:
vehicle = self._vehicles[str(vehicle_link.vin)]
num_call_per_scan += len(vehicle.coordinators)

new_scan_interval = timedelta(
seconds=(3600 * num_call_per_scan) / MAX_CALLS_PER_HOURS
)
if new_scan_interval != scan_interval:
# we need to change the vehicles with the right scan interval
for vehicle_link in vehicles.vehicleLinks:
vehicle = self._vehicles[str(vehicle_link.vin)]
vehicle.update_scan_interval(new_scan_interval)
vehicle.update_scan_interval(new_scan_interval)

async def async_initialise_vehicle(
self,
@@ -164,10 +180,10 @@ class RenaultHub:
"""Get Kamereon account ids."""
accounts = []
for account in await self._client.get_api_accounts():
vehicles = await account.get_vehicles()
vehicle_links = await _get_filtered_vehicles(account)

# Only add the account if it has linked vehicles.
if vehicles.vehicleLinks:
if vehicle_links:
accounts.append(account.account_id)
return accounts
@@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.16.0"]
"requirements": ["reolink-aio==0.16.1"]
}