mirror of
https://github.com/home-assistant/core.git
synced 2025-10-14 06:09:39 +00:00
Compare commits
207 Commits
water_hier
...
dev
Author | SHA1 | Date | |
---|---|---|---|
![]() |
ae8678b2af | ||
![]() |
b52ee6915a | ||
![]() |
b0e1b00598 | ||
![]() |
fd902af23b | ||
![]() |
07d6ebef4c | ||
![]() |
c9b9f05f4b | ||
![]() |
90a0262217 | ||
![]() |
324aa09ebe | ||
![]() |
663431fc80 | ||
![]() |
610183c11b | ||
![]() |
b7718f6f0f | ||
![]() |
5708f61964 | ||
![]() |
4fb3c9fed2 | ||
![]() |
1e5f5f4ad3 | ||
![]() |
82c536a4e9 | ||
![]() |
97afec1912 | ||
![]() |
0bfdd70730 | ||
![]() |
01dee6507b | ||
![]() |
04f83bc067 | ||
![]() |
f0756af52d | ||
![]() |
dd6bc715d8 | ||
![]() |
1452aec47f | ||
![]() |
6f8439de5b | ||
![]() |
f649717372 | ||
![]() |
bf273ef407 | ||
![]() |
94d015e00a | ||
![]() |
f185ffddf1 | ||
![]() |
2d0b4dd7e9 | ||
![]() |
eab1205823 | ||
![]() |
a991dcbe6a | ||
![]() |
6f79a65762 | ||
![]() |
ce1fdc6b75 | ||
![]() |
d7aa0834c7 | ||
![]() |
3151384867 | ||
![]() |
8aa5e7de91 | ||
![]() |
cca5c807ad | ||
![]() |
89433219dd | ||
![]() |
694b169c79 | ||
![]() |
f1e0954c61 | ||
![]() |
3c3b4ef14a | ||
![]() |
54ff49115c | ||
![]() |
2512dad843 | ||
![]() |
a3b67d5f28 | ||
![]() |
76a0b2d616 | ||
![]() |
1182082c1f | ||
![]() |
e0811558cb | ||
![]() |
d389405218 | ||
![]() |
3a71087c9c | ||
![]() |
c7d7cfa7ad | ||
![]() |
e4ea79866d | ||
![]() |
ddfa6f33d2 | ||
![]() |
15e99650aa | ||
![]() |
58bacbb84e | ||
![]() |
82758f7671 | ||
![]() |
7739cdc626 | ||
![]() |
4ca1ae61aa | ||
![]() |
3d130a9bdf | ||
![]() |
2b38f33d50 | ||
![]() |
19dedb038e | ||
![]() |
59781422f7 | ||
![]() |
083277d1ff | ||
![]() |
9b9c55b37b | ||
![]() |
c9d67d596b | ||
![]() |
7948b35265 | ||
![]() |
be843970fd | ||
![]() |
53b65b2fb4 | ||
![]() |
ac7be97245 | ||
![]() |
09e539bf0e | ||
![]() |
6ef1b3bad3 | ||
![]() |
38e46f7a53 | ||
![]() |
ef60d16659 | ||
![]() |
bf4f8b48a3 | ||
![]() |
3c1496d2bb | ||
![]() |
d457787639 | ||
![]() |
de4bfd6f05 | ||
![]() |
34c5748132 | ||
![]() |
5bfd9620db | ||
![]() |
6f8766e4bd | ||
![]() |
d3b519846b | ||
![]() |
36d952800b | ||
![]() |
b832561e53 | ||
![]() |
c59d295bf2 | ||
![]() |
6e28e3aed1 | ||
![]() |
6d8944d379 | ||
![]() |
762fd6d241 | ||
![]() |
4c6500e7a4 | ||
![]() |
cdc224715f | ||
![]() |
648b250fc8 | ||
![]() |
ba61562300 | ||
![]() |
8d67182e0e | ||
![]() |
3ce1ef4c3f | ||
![]() |
bde4eb5011 | ||
![]() |
a58a7065b6 | ||
![]() |
0c9b72bf1d | ||
![]() |
541d94d8c6 | ||
![]() |
c370c86a4f | ||
![]() |
bc6accf4ae | ||
![]() |
d40eeee422 | ||
![]() |
c9d9730c4a | ||
![]() |
d3a8f3191b | ||
![]() |
cb3829ddee | ||
![]() |
73383e6c26 | ||
![]() |
217894ee8b | ||
![]() |
c7321a337e | ||
![]() |
517124dfbe | ||
![]() |
f49299b009 | ||
![]() |
1001da08f6 | ||
![]() |
0da019404c | ||
![]() |
9a4280d0de | ||
![]() |
c28e105df5 | ||
![]() |
68787248f6 | ||
![]() |
36be6b6187 | ||
![]() |
42dea92c51 | ||
![]() |
4b828d4753 | ||
![]() |
8e79c38f34 | ||
![]() |
c92107b8d4 | ||
![]() |
b25622f40e | ||
![]() |
e887d5e6ad | ||
![]() |
1f19e40cfe | ||
![]() |
3d2d2271d3 | ||
![]() |
d1dd5eecd6 | ||
![]() |
cdec29ffb7 | ||
![]() |
07f3e00f18 | ||
![]() |
084d029168 | ||
![]() |
17e997ee18 | ||
![]() |
16d4c6c95a | ||
![]() |
0205a636ef | ||
![]() |
4707fd2f94 | ||
![]() |
ad3cadab83 | ||
![]() |
3fce815415 | ||
![]() |
ee67619cb1 | ||
![]() |
1a744a2c91 | ||
![]() |
951978e483 | ||
![]() |
54d30377d3 | ||
![]() |
eb04dda197 | ||
![]() |
1e192aadfa | ||
![]() |
6f680f3d03 | ||
![]() |
f0663dc275 | ||
![]() |
96bb67bef9 | ||
![]() |
929d76e236 | ||
![]() |
fe1ff083de | ||
![]() |
90c68f8ad0 | ||
![]() |
6b79aa7738 | ||
![]() |
f6fb4c8d5a | ||
![]() |
a6e575ecfa | ||
![]() |
85392ae167 | ||
![]() |
9d124be491 | ||
![]() |
8bca3931ab | ||
![]() |
0367a01287 | ||
![]() |
86e2c2f361 | ||
![]() |
335c8e50a2 | ||
![]() |
8152a9e5da | ||
![]() |
250e562caf | ||
![]() |
a3b641e53d | ||
![]() |
135ea4c02e | ||
![]() |
bc980c1212 | ||
![]() |
59ca88a7e8 | ||
![]() |
d45114cd11 | ||
![]() |
2eba650064 | ||
![]() |
de4adb8855 | ||
![]() |
1d86c03b02 | ||
![]() |
77fb1036cc | ||
![]() |
b15b4e4888 | ||
![]() |
dddf6d5f1a | ||
![]() |
66fb5f4d95 | ||
![]() |
42a9d5d4e3 | ||
![]() |
93fa162913 | ||
![]() |
c432b1c8da | ||
![]() |
00955b8e6a | ||
![]() |
045b9d7f01 | ||
![]() |
438c4c7871 | ||
![]() |
abc360460c | ||
![]() |
26437bb253 | ||
![]() |
56d953ac1e | ||
![]() |
fe4eb8766d | ||
![]() |
2d9f14c401 | ||
![]() |
7b6ccb07fd | ||
![]() |
2ba5728060 | ||
![]() |
b5f163cc85 | ||
![]() |
65540a3e0b | ||
![]() |
cbf1b39edb | ||
![]() |
142daf5e49 | ||
![]() |
8bd0ff7cca | ||
![]() |
ac676e12f6 | ||
![]() |
c0ac3292cd | ||
![]() |
80fd07c128 | ||
![]() |
3701d8859a | ||
![]() |
6dd26bae88 | ||
![]() |
1a0abe296c | ||
![]() |
de6c61a4ab | ||
![]() |
33c677596e | ||
![]() |
e9b4b8e99b | ||
![]() |
0525c04c42 | ||
![]() |
d57b502551 | ||
![]() |
9fb708baf4 | ||
![]() |
abdf24b7a0 | ||
![]() |
29bfbd27bb | ||
![]() |
224553f8d9 | ||
![]() |
7c9f6a061f | ||
![]() |
8e115d4685 | ||
![]() |
00c189844f | ||
![]() |
4587c286bb | ||
![]() |
b46097a7fc | ||
![]() |
299cb6a2ff | ||
![]() |
1b7b91b328 | ||
![]() |
01a1480ebd | ||
![]() |
26b8abb118 |
8
.github/workflows/ci.yaml
vendored
8
.github/workflows/ci.yaml
vendored
@@ -37,7 +37,7 @@ on:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
CACHE_VERSION: 8
|
||||
CACHE_VERSION: 9
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2025.11"
|
||||
@@ -525,7 +525,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
uv pip install -U "pip>=21.3.1" setuptools wheel
|
||||
uv pip install -U "pip>=25.2"
|
||||
uv pip install -r requirements.txt
|
||||
python -m script.gen_requirements_all ci
|
||||
uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
|
||||
@@ -625,7 +625,7 @@ jobs:
|
||||
steps:
|
||||
- *checkout
|
||||
- name: Dependency review
|
||||
uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
|
||||
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
|
||||
with:
|
||||
license-check: false # We use our own license audit checks
|
||||
|
||||
@@ -741,7 +741,7 @@ jobs:
|
||||
- name: Generate partial mypy restore key
|
||||
id: generate-mypy-key
|
||||
run: |
|
||||
mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
|
||||
mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
|
||||
echo "version=$mypy_version" >> $GITHUB_OUTPUT
|
||||
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
|
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
|
||||
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
|
||||
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -79,7 +79,6 @@ junit.xml
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
.python-version
|
||||
.tool-versions
|
||||
|
||||
# emacs auto backups
|
||||
|
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.13
|
@@ -221,6 +221,7 @@ homeassistant.components.generic_thermostat.*
|
||||
homeassistant.components.geo_location.*
|
||||
homeassistant.components.geocaching.*
|
||||
homeassistant.components.gios.*
|
||||
homeassistant.components.github.*
|
||||
homeassistant.components.glances.*
|
||||
homeassistant.components.go2rtc.*
|
||||
homeassistant.components.goalzero.*
|
||||
|
18
CODEOWNERS
generated
18
CODEOWNERS
generated
@@ -46,6 +46,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/accuweather/ @bieniu
|
||||
/homeassistant/components/acmeda/ @atmurray
|
||||
/tests/components/acmeda/ @atmurray
|
||||
/homeassistant/components/actron_air/ @kclif9 @JagadishDhanamjayam
|
||||
/tests/components/actron_air/ @kclif9 @JagadishDhanamjayam
|
||||
/homeassistant/components/adax/ @danielhiversen @lazytarget
|
||||
/tests/components/adax/ @danielhiversen @lazytarget
|
||||
/homeassistant/components/adguard/ @frenck
|
||||
@@ -762,8 +764,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/intesishome/ @jnimmo
|
||||
/homeassistant/components/iometer/ @MaestroOnICe
|
||||
/tests/components/iometer/ @MaestroOnICe
|
||||
/homeassistant/components/iometer/ @jukrebs
|
||||
/tests/components/iometer/ @jukrebs
|
||||
/homeassistant/components/ios/ @robbiet480
|
||||
/tests/components/ios/ @robbiet480
|
||||
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
|
||||
@@ -1065,8 +1067,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/nilu/ @hfurubotten
|
||||
/homeassistant/components/nina/ @DeerMaximum
|
||||
/tests/components/nina/ @DeerMaximum
|
||||
/homeassistant/components/nintendo_parental/ @pantherale0
|
||||
/tests/components/nintendo_parental/ @pantherale0
|
||||
/homeassistant/components/nintendo_parental_controls/ @pantherale0
|
||||
/tests/components/nintendo_parental_controls/ @pantherale0
|
||||
/homeassistant/components/nissan_leaf/ @filcole
|
||||
/homeassistant/components/noaa_tides/ @jdelaney72
|
||||
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
@@ -1413,8 +1415,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sftp_storage/ @maretodoric
|
||||
/tests/components/sftp_storage/ @maretodoric
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
|
||||
/homeassistant/components/shell_command/ @home-assistant/core
|
||||
/tests/components/shell_command/ @home-assistant/core
|
||||
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
|
||||
@@ -1479,8 +1481,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/snoo/ @Lash-L
|
||||
/homeassistant/components/snooz/ @AustinBrunkhorst
|
||||
/tests/components/snooz/ @AustinBrunkhorst
|
||||
/homeassistant/components/solaredge/ @frenck @bdraco
|
||||
/tests/components/solaredge/ @frenck @bdraco
|
||||
/homeassistant/components/solaredge/ @frenck @bdraco @tronikos
|
||||
/tests/components/solaredge/ @frenck @bdraco @tronikos
|
||||
/homeassistant/components/solaredge_local/ @drobtravels @scheric
|
||||
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
|
||||
/tests/components/solarlog/ @Ernst79 @dontinelli
|
||||
|
@@ -34,9 +34,11 @@ WORKDIR /usr/src
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
|
||||
|
||||
RUN uv python install 3.13.2
|
||||
|
||||
USER vscode
|
||||
|
||||
COPY .python-version ./
|
||||
RUN uv python install
|
||||
|
||||
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
|
||||
RUN uv venv $VIRTUAL_ENV
|
||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
|
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
|
||||
}
|
||||
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
|
||||
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
|
||||
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
|
||||
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
|
||||
|
57
homeassistant/components/actron_air/__init__.py
Normal file
57
homeassistant/components/actron_air/__init__.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""The Actron Air integration."""
|
||||
|
||||
from actron_neo_api import (
|
||||
ActronAirNeoACSystem,
|
||||
ActronNeoAPI,
|
||||
ActronNeoAPIError,
|
||||
ActronNeoAuthError,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import _LOGGER
|
||||
from .coordinator import (
|
||||
ActronAirConfigEntry,
|
||||
ActronAirRuntimeData,
|
||||
ActronAirSystemCoordinator,
|
||||
)
|
||||
|
||||
PLATFORM = [Platform.CLIMATE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
|
||||
"""Set up Actron Air integration from a config entry."""
|
||||
|
||||
api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
|
||||
systems: list[ActronAirNeoACSystem] = []
|
||||
|
||||
try:
|
||||
systems = await api.get_ac_systems()
|
||||
await api.update_status()
|
||||
except ActronNeoAuthError:
|
||||
_LOGGER.error("Authentication error while setting up Actron Air integration")
|
||||
raise
|
||||
except ActronNeoAPIError as err:
|
||||
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
|
||||
raise
|
||||
|
||||
system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
|
||||
for system in systems:
|
||||
coordinator = ActronAirSystemCoordinator(hass, entry, api, system)
|
||||
_LOGGER.debug("Setting up coordinator for system: %s", system["serial"])
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
system_coordinators[system["serial"]] = coordinator
|
||||
|
||||
entry.runtime_data = ActronAirRuntimeData(
|
||||
api=api,
|
||||
system_coordinators=system_coordinators,
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORM)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORM)
|
259
homeassistant/components/actron_air/climate.py
Normal file
259
homeassistant/components/actron_air/climate.py
Normal file
@@ -0,0 +1,259 @@
|
||||
"""Climate platform for Actron Air integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
FAN_MODE_MAPPING_ACTRONAIR_TO_HA = {
|
||||
"AUTO": FAN_AUTO,
|
||||
"LOW": FAN_LOW,
|
||||
"MED": FAN_MEDIUM,
|
||||
"HIGH": FAN_HIGH,
|
||||
}
|
||||
FAN_MODE_MAPPING_HA_TO_ACTRONAIR = {
|
||||
v: k for k, v in FAN_MODE_MAPPING_ACTRONAIR_TO_HA.items()
|
||||
}
|
||||
HVAC_MODE_MAPPING_ACTRONAIR_TO_HA = {
|
||||
"COOL": HVACMode.COOL,
|
||||
"HEAT": HVACMode.HEAT,
|
||||
"FAN": HVACMode.FAN_ONLY,
|
||||
"AUTO": HVACMode.AUTO,
|
||||
"OFF": HVACMode.OFF,
|
||||
}
|
||||
HVAC_MODE_MAPPING_HA_TO_ACTRONAIR = {
|
||||
v: k for k, v in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.items()
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ActronAirConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Actron Air climate entities."""
|
||||
system_coordinators = entry.runtime_data.system_coordinators
|
||||
entities: list[ClimateEntity] = []
|
||||
|
||||
for coordinator in system_coordinators.values():
|
||||
status = coordinator.data
|
||||
name = status.ac_system.system_name
|
||||
entities.append(ActronSystemClimate(coordinator, name))
|
||||
|
||||
entities.extend(
|
||||
ActronZoneClimate(coordinator, zone)
|
||||
for zone in status.remote_zone_info
|
||||
if zone.exists
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class BaseClimateEntity(CoordinatorEntity[ActronAirSystemCoordinator], ClimateEntity):
|
||||
"""Base class for Actron Air climate entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
_attr_name = None
|
||||
_attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values())
|
||||
_attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values())
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ActronAirSystemCoordinator,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize an Actron Air unit."""
|
||||
super().__init__(coordinator)
|
||||
self._serial_number = coordinator.serial_number
|
||||
|
||||
|
||||
class ActronSystemClimate(BaseClimateEntity):
|
||||
"""Representation of the Actron Air system."""
|
||||
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ActronAirSystemCoordinator,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize an Actron Air unit."""
|
||||
super().__init__(coordinator, name)
|
||||
serial_number = coordinator.serial_number
|
||||
self._attr_unique_id = serial_number
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial_number)},
|
||||
name=self._status.ac_system.system_name,
|
||||
manufacturer="Actron Air",
|
||||
model_id=self._status.ac_system.master_wc_model,
|
||||
sw_version=self._status.ac_system.master_wc_firmware_version,
|
||||
serial_number=serial_number,
|
||||
)
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature that can be set."""
|
||||
return self._status.min_temp
|
||||
|
||||
@property
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature that can be set."""
|
||||
return self._status.max_temp
|
||||
|
||||
@property
|
||||
def _status(self) -> ActronAirNeoStatus:
|
||||
"""Get the current status from the coordinator."""
|
||||
return self.coordinator.data
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the current HVAC mode."""
|
||||
if not self._status.user_aircon_settings.is_on:
|
||||
return HVACMode.OFF
|
||||
|
||||
mode = self._status.user_aircon_settings.mode
|
||||
return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode)
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the current fan mode."""
|
||||
fan_mode = self._status.user_aircon_settings.fan_mode
|
||||
return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float:
|
||||
"""Return the current humidity."""
|
||||
return self._status.master_info.live_humidity_pc
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float:
|
||||
"""Return the current temperature."""
|
||||
return self._status.master_info.live_temp_c
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float:
|
||||
"""Return the target temperature."""
|
||||
return self._status.user_aircon_settings.temperature_setpoint_cool_c
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set a new fan mode."""
|
||||
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode.lower())
|
||||
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC mode."""
|
||||
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
|
||||
await self._status.ac_system.set_system_mode(ac_mode)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
await self._status.user_aircon_settings.set_temperature(temperature=temp)
|
||||
|
||||
|
||||
class ActronZoneClimate(BaseClimateEntity):
|
||||
"""Representation of a zone within the Actron Air system."""
|
||||
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ActronAirSystemCoordinator,
|
||||
zone: ActronAirNeoZone,
|
||||
) -> None:
|
||||
"""Initialize an Actron Air unit."""
|
||||
super().__init__(coordinator, zone.title)
|
||||
serial_number = coordinator.serial_number
|
||||
self._zone_id: int = zone.zone_id
|
||||
self._attr_unique_id: str = f"{serial_number}_zone_{zone.zone_id}"
|
||||
self._attr_device_info: DeviceInfo = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._attr_unique_id)},
|
||||
name=zone.title,
|
||||
manufacturer="Actron Air",
|
||||
model="Zone",
|
||||
suggested_area=zone.title,
|
||||
via_device=(DOMAIN, serial_number),
|
||||
)
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature that can be set."""
|
||||
return self._zone.min_temp
|
||||
|
||||
@property
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature that can be set."""
|
||||
return self._zone.max_temp
|
||||
|
||||
@property
|
||||
def _zone(self) -> ActronAirNeoZone:
|
||||
"""Get the current zone data from the coordinator."""
|
||||
status = self.coordinator.data
|
||||
return status.zones[self._zone_id]
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the current HVAC mode."""
|
||||
if self._zone.is_active:
|
||||
mode = self._zone.hvac_mode
|
||||
return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode)
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
return self._zone.humidity
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._zone.live_temp_c
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
return self._zone.temperature_setpoint_cool_c
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC mode."""
|
||||
is_enabled = hvac_mode != HVACMode.OFF
|
||||
await self._zone.enable(is_enabled)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the temperature."""
|
||||
await self._zone.set_temperature(temperature=kwargs["temperature"])
|
132
homeassistant/components/actron_air/config_flow.py
Normal file
132
homeassistant/components/actron_air/config_flow.py
Normal file
@@ -0,0 +1,132 @@
|
||||
"""Setup config flow for Actron Air integration."""
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronNeoAPI, ActronNeoAuthError
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from .const import _LOGGER, DOMAIN
|
||||
|
||||
|
||||
class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Actron Air."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._api: ActronNeoAPI | None = None
|
||||
self._device_code: str | None = None
|
||||
self._user_code: str = ""
|
||||
self._verification_uri: str = ""
|
||||
self._expires_minutes: str = "30"
|
||||
self.login_task: asyncio.Task | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
if self._api is None:
|
||||
_LOGGER.debug("Initiating device authorization")
|
||||
self._api = ActronNeoAPI()
|
||||
try:
|
||||
device_code_response = await self._api.request_device_code()
|
||||
except ActronNeoAuthError as err:
|
||||
_LOGGER.error("OAuth2 flow failed: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
self._device_code = device_code_response["device_code"]
|
||||
self._user_code = device_code_response["user_code"]
|
||||
self._verification_uri = device_code_response["verification_uri_complete"]
|
||||
self._expires_minutes = str(device_code_response["expires_in"] // 60)
|
||||
|
||||
async def _wait_for_authorization() -> None:
|
||||
"""Wait for the user to authorize the device."""
|
||||
assert self._api is not None
|
||||
assert self._device_code is not None
|
||||
_LOGGER.debug("Waiting for device authorization")
|
||||
try:
|
||||
await self._api.poll_for_token(self._device_code)
|
||||
_LOGGER.debug("Authorization successful")
|
||||
except ActronNeoAuthError as ex:
|
||||
_LOGGER.exception("Error while waiting for device authorization")
|
||||
raise CannotConnect from ex
|
||||
|
||||
_LOGGER.debug("Checking login task")
|
||||
if self.login_task is None:
|
||||
_LOGGER.debug("Creating task for device authorization")
|
||||
self.login_task = self.hass.async_create_task(_wait_for_authorization())
|
||||
|
||||
if self.login_task.done():
|
||||
_LOGGER.debug("Login task is done, checking results")
|
||||
if exception := self.login_task.exception():
|
||||
if isinstance(exception, CannotConnect):
|
||||
return self.async_show_progress_done(
|
||||
next_step_id="connection_error"
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="timeout")
|
||||
return self.async_show_progress_done(next_step_id="finish_login")
|
||||
|
||||
return self.async_show_progress(
|
||||
step_id="user",
|
||||
progress_action="wait_for_authorization",
|
||||
description_placeholders={
|
||||
"user_code": self._user_code,
|
||||
"verification_uri": self._verification_uri,
|
||||
"expires_minutes": self._expires_minutes,
|
||||
},
|
||||
progress_task=self.login_task,
|
||||
)
|
||||
|
||||
async def async_step_finish_login(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the finalization of login."""
|
||||
_LOGGER.debug("Finalizing authorization")
|
||||
assert self._api is not None
|
||||
|
||||
try:
|
||||
user_data = await self._api.get_user_info()
|
||||
except ActronNeoAuthError as err:
|
||||
_LOGGER.error("Error getting user info: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
unique_id = str(user_data["id"])
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_data["email"],
|
||||
data={CONF_API_TOKEN: self._api.refresh_token_value},
|
||||
)
|
||||
|
||||
async def async_step_timeout(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle issues that need transition await from progress step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="timeout",
|
||||
)
|
||||
del self.login_task
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_connection_error(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle connection error from progress step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="connection_error")
|
||||
|
||||
# Reset state and try again
|
||||
self._api = None
|
||||
self._device_code = None
|
||||
self.login_task = None
|
||||
return await self.async_step_user()
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
6
homeassistant/components/actron_air/const.py
Normal file
6
homeassistant/components/actron_air/const.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Constants used by Actron Air integration."""
|
||||
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
DOMAIN = "actron_air"
|
69
homeassistant/components/actron_air/coordinator.py
Normal file
69
homeassistant/components/actron_air/coordinator.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Coordinator for Actron Air integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import _LOGGER
|
||||
|
||||
STALE_DEVICE_TIMEOUT = timedelta(hours=24)
|
||||
ERROR_NO_SYSTEMS_FOUND = "no_systems_found"
|
||||
ERROR_UNKNOWN = "unknown_error"
|
||||
|
||||
|
||||
@dataclass
|
||||
class ActronAirRuntimeData:
|
||||
"""Runtime data for the Actron Air integration."""
|
||||
|
||||
api: ActronNeoAPI
|
||||
system_coordinators: dict[str, ActronAirSystemCoordinator]
|
||||
|
||||
|
||||
type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]
|
||||
|
||||
AUTH_ERROR_THRESHOLD = 3
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
|
||||
"""System coordinator for Actron Air integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ActronAirConfigEntry,
|
||||
api: ActronNeoAPI,
|
||||
system: ActronAirNeoACSystem,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="Actron Air Status",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
self.system = system
|
||||
self.serial_number = system["serial"]
|
||||
self.api = api
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
self.last_seen = dt_util.utcnow()
|
||||
|
||||
async def _async_update_data(self) -> ActronAirNeoStatus:
|
||||
"""Fetch updates and merge incremental changes into the full state."""
|
||||
await self.api.update_status()
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
self.last_seen = dt_util.utcnow()
|
||||
return self.status
|
||||
|
||||
def is_device_stale(self) -> bool:
|
||||
"""Check if a device is stale (not seen for a while)."""
|
||||
return (dt_util.utcnow() - self.last_seen) > STALE_DEVICE_TIMEOUT
|
16
homeassistant/components/actron_air/manifest.json
Normal file
16
homeassistant/components/actron_air/manifest.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"domain": "actron_air",
|
||||
"name": "Actron Air",
|
||||
"codeowners": ["@kclif9", "@JagadishDhanamjayam"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "neo-*",
|
||||
"macaddress": "FC0FE7*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/actron_air",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["actron-neo-api==0.1.84"]
|
||||
}
|
78
homeassistant/components/actron_air/quality_scale.yaml
Normal file
78
homeassistant/components/actron_air/quality_scale.yaml
Normal file
@@ -0,0 +1,78 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: This integration does not have custom service actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: This integration does not have custom service actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: This integration does not subscribe to external events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options flow
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: This integration uses DHCP discovery; however, it is cloud polling. Therefore there is no information to update.
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: todo
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: This integration does not use entity categories.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: This integration does not use entity device classes.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: Not required for this integration at this stage.
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration does not have any known issues that require repair.
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
29
homeassistant/components/actron_air/strings.json
Normal file
29
homeassistant/components/actron_air/strings.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Actron Air OAuth2 Authorization"
|
||||
},
|
||||
"timeout": {
|
||||
"title": "Authorization timeout",
|
||||
"description": "The authorization process timed out. Please try again.",
|
||||
"data": {}
|
||||
},
|
||||
"connection_error": {
|
||||
"title": "Connection error",
|
||||
"description": "Failed to connect to Actron Air. Please check your internet connection and try again.",
|
||||
"data": {}
|
||||
}
|
||||
},
|
||||
"progress": {
|
||||
"wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes."
|
||||
},
|
||||
"error": {
|
||||
"oauth2_error": "Failed to start OAuth2 flow. Please try again later."
|
||||
},
|
||||
"abort": {
|
||||
"oauth2_error": "Failed to start OAuth2 flow",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
}
|
||||
}
|
||||
}
|
@@ -71,7 +71,14 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=schema,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario"
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
|
@@ -14,7 +14,7 @@
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]",
|
||||
"name": "Name of the integration"
|
||||
},
|
||||
"description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario"
|
||||
"description": "To generate API key go to {api_key_url}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -18,6 +18,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS
|
||||
|
||||
DESCRIPTION_PLACEHOLDERS = {
|
||||
"developer_registration_url": "https://developer.airly.eu/register",
|
||||
}
|
||||
|
||||
|
||||
class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Airly."""
|
||||
@@ -85,6 +89,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "To generate API key go to https://developer.airly.eu/register",
|
||||
"description": "To generate API key go to {developer_registration_url}",
|
||||
"data": {
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
|
@@ -36,6 +36,11 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
|
||||
identifiers={(DOMAIN, str(airos_data.host.device_id))},
|
||||
manufacturer=MANUFACTURER,
|
||||
model=airos_data.host.devmodel,
|
||||
model_id=(
|
||||
sku
|
||||
if (sku := airos_data.derived.sku) not in ["UNKNOWN", "AMBIGUOUS"]
|
||||
else None
|
||||
),
|
||||
name=airos_data.host.hostname,
|
||||
sw_version=airos_data.host.fwversion,
|
||||
)
|
||||
|
@@ -4,7 +4,8 @@
|
||||
"codeowners": ["@CoMPaTech"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airos",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["airos==0.5.5"]
|
||||
"requirements": ["airos==0.5.6"]
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairq"],
|
||||
"requirements": ["aioairq==0.4.6"]
|
||||
"requirements": ["aioairq==0.4.7"]
|
||||
}
|
||||
|
@@ -16,10 +16,12 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
LIGHT_LUX,
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
Platform,
|
||||
UnitOfPressure,
|
||||
UnitOfSoundPressure,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -112,6 +114,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"lux": SensorEntityDescription(
|
||||
key="lux",
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"noise": SensorEntityDescription(
|
||||
key="noise",
|
||||
translation_key="ambient_noise",
|
||||
device_class=SensorDeviceClass.SOUND_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
}
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
@@ -41,6 +41,9 @@
|
||||
},
|
||||
"illuminance": {
|
||||
"name": "[%key:component::sensor::entity_component::illuminance::name%]"
|
||||
},
|
||||
"ambient_noise": {
|
||||
"name": "Ambient noise"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -2,10 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Final, final
|
||||
from typing import Any, Final, final
|
||||
|
||||
from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
@@ -28,8 +27,6 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.config_validation import make_entity_service_schema
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.entity_platform import EntityPlatform
|
||||
from homeassistant.helpers.frame import ReportBehavior, report_usage
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -149,68 +146,11 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
)
|
||||
_alarm_control_panel_option_default_code: str | None = None
|
||||
|
||||
__alarm_legacy_state: bool = False
|
||||
|
||||
def __init_subclass__(cls, **kwargs: Any) -> None:
|
||||
"""Post initialisation processing."""
|
||||
super().__init_subclass__(**kwargs)
|
||||
if any(method in cls.__dict__ for method in ("_attr_state", "state")):
|
||||
# Integrations should use the 'alarm_state' property instead of
|
||||
# setting the state directly.
|
||||
cls.__alarm_legacy_state = True
|
||||
|
||||
def __setattr__(self, name: str, value: Any, /) -> None:
|
||||
"""Set attribute.
|
||||
|
||||
Deprecation warning if setting '_attr_state' directly
|
||||
unless already reported.
|
||||
"""
|
||||
if name == "_attr_state":
|
||||
self._report_deprecated_alarm_state_handling()
|
||||
return super().__setattr__(name, value)
|
||||
|
||||
@callback
|
||||
def add_to_platform_start(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
platform: EntityPlatform,
|
||||
parallel_updates: asyncio.Semaphore | None,
|
||||
) -> None:
|
||||
"""Start adding an entity to a platform."""
|
||||
super().add_to_platform_start(hass, platform, parallel_updates)
|
||||
if self.__alarm_legacy_state:
|
||||
self._report_deprecated_alarm_state_handling()
|
||||
|
||||
@callback
|
||||
def _report_deprecated_alarm_state_handling(self) -> None:
|
||||
"""Report on deprecated handling of alarm state.
|
||||
|
||||
Integrations should implement alarm_state instead of using state directly.
|
||||
"""
|
||||
report_usage(
|
||||
"is setting state directly."
|
||||
f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
|
||||
" property and return its state using the AlarmControlPanelState enum",
|
||||
core_integration_behavior=ReportBehavior.ERROR,
|
||||
custom_integration_behavior=ReportBehavior.LOG,
|
||||
breaks_in_ha_version="2025.11",
|
||||
integration_domain=self.platform.platform_name if self.platform else None,
|
||||
exclude_integrations={DOMAIN},
|
||||
)
|
||||
|
||||
@final
|
||||
@property
|
||||
def state(self) -> str | None:
|
||||
"""Return the current state."""
|
||||
if (alarm_state := self.alarm_state) is not None:
|
||||
return alarm_state
|
||||
if self._attr_state is not None:
|
||||
# Backwards compatibility for integrations that set state directly
|
||||
# Should be removed in 2025.11
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(self._attr_state, str)
|
||||
return self._attr_state
|
||||
return None
|
||||
return self.alarm_state
|
||||
|
||||
@cached_property
|
||||
def alarm_state(self) -> AlarmControlPanelState | None:
|
||||
|
@@ -1472,10 +1472,10 @@ class AlexaModeController(AlexaCapability):
|
||||
# Return state instead of position when using ModeController.
|
||||
mode = self.entity.state
|
||||
if mode in (
|
||||
cover.STATE_OPEN,
|
||||
cover.STATE_OPENING,
|
||||
cover.STATE_CLOSED,
|
||||
cover.STATE_CLOSING,
|
||||
cover.CoverState.OPEN,
|
||||
cover.CoverState.OPENING,
|
||||
cover.CoverState.CLOSED,
|
||||
cover.CoverState.CLOSING,
|
||||
STATE_UNKNOWN,
|
||||
):
|
||||
return f"{cover.ATTR_POSITION}.{mode}"
|
||||
@@ -1594,11 +1594,11 @@ class AlexaModeController(AlexaCapability):
|
||||
["Position", AlexaGlobalCatalog.SETTING_OPENING], False
|
||||
)
|
||||
self._resource.add_mode(
|
||||
f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
|
||||
f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
|
||||
[AlexaGlobalCatalog.VALUE_OPEN],
|
||||
)
|
||||
self._resource.add_mode(
|
||||
f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
|
||||
f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
|
||||
[AlexaGlobalCatalog.VALUE_CLOSE],
|
||||
)
|
||||
self._resource.add_mode(
|
||||
@@ -1651,22 +1651,22 @@ class AlexaModeController(AlexaCapability):
|
||||
raise_labels.append(AlexaSemantics.ACTION_OPEN)
|
||||
self._semantics.add_states_to_value(
|
||||
[AlexaSemantics.STATES_CLOSED],
|
||||
f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
|
||||
f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
|
||||
)
|
||||
self._semantics.add_states_to_value(
|
||||
[AlexaSemantics.STATES_OPEN],
|
||||
f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
|
||||
f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
|
||||
)
|
||||
|
||||
self._semantics.add_action_to_directive(
|
||||
lower_labels,
|
||||
"SetMode",
|
||||
{"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"},
|
||||
{"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}"},
|
||||
)
|
||||
self._semantics.add_action_to_directive(
|
||||
raise_labels,
|
||||
"SetMode",
|
||||
{"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"},
|
||||
{"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}"},
|
||||
)
|
||||
|
||||
return self._semantics.serialize_semantics()
|
||||
|
@@ -1261,9 +1261,9 @@ async def async_api_set_mode(
|
||||
elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
|
||||
position = mode.split(".")[1]
|
||||
|
||||
if position == cover.STATE_CLOSED:
|
||||
if position == cover.CoverState.CLOSED:
|
||||
service = cover.SERVICE_CLOSE_COVER
|
||||
elif position == cover.STATE_OPEN:
|
||||
elif position == cover.CoverState.OPEN:
|
||||
service = cover.SERVICE_OPEN_COVER
|
||||
elif position == "custom":
|
||||
service = cover.SERVICE_STOP_COVER
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==6.2.9"]
|
||||
"requirements": ["aioamazondevices==6.4.3"]
|
||||
}
|
||||
|
@@ -4,12 +4,15 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components.zone import ENTITY_ID_HOME
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
@@ -18,7 +21,13 @@ from homeassistant.config_entries import (
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
|
||||
from homeassistant.const import (
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_API_KEY,
|
||||
CONF_LLM_HASS_API,
|
||||
CONF_NAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.selector import (
|
||||
@@ -37,12 +46,23 @@ from .const import (
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
RECOMMENDED_WEB_SEARCH,
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -168,6 +188,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
|
||||
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
|
||||
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
|
||||
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
|
||||
model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
|
||||
errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
|
||||
elif user_input.get(
|
||||
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
|
||||
):
|
||||
user_input.update(await self._get_location_data())
|
||||
|
||||
if not errors:
|
||||
if self._is_new:
|
||||
@@ -215,6 +243,68 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
errors=errors or None,
|
||||
)
|
||||
|
||||
async def _get_location_data(self) -> dict[str, str]:
|
||||
"""Get approximate location data of the user."""
|
||||
location_data: dict[str, str] = {}
|
||||
zone_home = self.hass.states.get(ENTITY_ID_HOME)
|
||||
if zone_home is not None:
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
)
|
||||
location_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
description="Free text input for the city, e.g. `San Francisco`",
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
description="Free text input for the region, e.g. `California`",
|
||||
): str,
|
||||
}
|
||||
)
|
||||
response = await client.messages.create(
|
||||
model=RECOMMENDED_CHAT_MODEL,
|
||||
messages=[
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Where are the following coordinates located: "
|
||||
f"({zone_home.attributes[ATTR_LATITUDE]},"
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
|
||||
"only with a JSON object using the following schema:\n"
|
||||
f"{convert(location_schema)}",
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
],
|
||||
max_tokens=RECOMMENDED_MAX_TOKENS,
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
json.loads(
|
||||
"{"
|
||||
+ "".join(
|
||||
block.text
|
||||
for block in response.content
|
||||
if isinstance(block, anthropic.types.TextBlock)
|
||||
)
|
||||
)
|
||||
or {}
|
||||
)
|
||||
|
||||
if self.hass.config.country:
|
||||
location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country
|
||||
location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone
|
||||
|
||||
_LOGGER.debug("Location data: %s", location_data)
|
||||
|
||||
return location_data
|
||||
|
||||
async_step_user = async_step_set_options
|
||||
async_step_reconfigure = async_step_set_options
|
||||
|
||||
@@ -273,6 +363,18 @@ def anthropic_config_option_schema(
|
||||
CONF_THINKING_BUDGET,
|
||||
default=RECOMMENDED_THINKING_BUDGET,
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH,
|
||||
default=RECOMMENDED_WEB_SEARCH,
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
return schema
|
||||
|
@@ -18,9 +18,26 @@ RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
RECOMMENDED_THINKING_BUDGET = 0
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
CONF_WEB_SEARCH = "web_search"
|
||||
RECOMMENDED_WEB_SEARCH = False
|
||||
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
|
||||
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
|
||||
CONF_WEB_SEARCH_CITY = "city"
|
||||
CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
NON_THINKING_MODELS = [
|
||||
"claude-3-5", # Both sonnet and haiku
|
||||
"claude-3-opus",
|
||||
"claude-3-haiku",
|
||||
]
|
||||
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3-haiku",
|
||||
"claude-3-opus",
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
]
|
||||
|
@@ -1,12 +1,17 @@
|
||||
"""Base entity for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
from dataclasses import dataclass, field
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic.types import (
|
||||
CitationsDelta,
|
||||
CitationsWebSearchResultLocation,
|
||||
CitationWebSearchResultLocationParam,
|
||||
ContentBlockParam,
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
@@ -16,11 +21,16 @@ from anthropic.types import (
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
ServerToolUseBlock,
|
||||
ServerToolUseBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextCitation,
|
||||
TextCitationParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
@@ -29,9 +39,15 @@ from anthropic.types import (
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUnionParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
WebSearchTool20250305Param,
|
||||
WebSearchToolRequestErrorParam,
|
||||
WebSearchToolResultBlock,
|
||||
WebSearchToolResultBlockParam,
|
||||
WebSearchToolResultError,
|
||||
)
|
||||
from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||
from voluptuous_openapi import convert
|
||||
@@ -48,6 +64,13 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
@@ -73,6 +96,69 @@ def _format_tool(
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class CitationDetails:
|
||||
"""Citation details for a content part."""
|
||||
|
||||
index: int = 0
|
||||
"""Start position of the text."""
|
||||
|
||||
length: int = 0
|
||||
"""Length of the relevant data."""
|
||||
|
||||
citations: list[TextCitationParam] = field(default_factory=list)
|
||||
"""Citations for the content part."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ContentDetails:
|
||||
"""Native data for AssistantContent."""
|
||||
|
||||
citation_details: list[CitationDetails] = field(default_factory=list)
|
||||
|
||||
def has_content(self) -> bool:
|
||||
"""Check if there is any content."""
|
||||
return any(detail.length > 0 for detail in self.citation_details)
|
||||
|
||||
def has_citations(self) -> bool:
|
||||
"""Check if there are any citations."""
|
||||
return any(detail.citations for detail in self.citation_details)
|
||||
|
||||
def add_citation_detail(self) -> None:
|
||||
"""Add a new citation detail."""
|
||||
if not self.citation_details or self.citation_details[-1].length > 0:
|
||||
self.citation_details.append(
|
||||
CitationDetails(
|
||||
index=self.citation_details[-1].index
|
||||
+ self.citation_details[-1].length
|
||||
if self.citation_details
|
||||
else 0
|
||||
)
|
||||
)
|
||||
|
||||
def add_citation(self, citation: TextCitation) -> None:
|
||||
"""Add a citation to the current detail."""
|
||||
if not self.citation_details:
|
||||
self.citation_details.append(CitationDetails())
|
||||
citation_param: TextCitationParam | None = None
|
||||
if isinstance(citation, CitationsWebSearchResultLocation):
|
||||
citation_param = CitationWebSearchResultLocationParam(
|
||||
type="web_search_result_location",
|
||||
title=citation.title,
|
||||
url=citation.url,
|
||||
cited_text=citation.cited_text,
|
||||
encrypted_index=citation.encrypted_index,
|
||||
)
|
||||
if citation_param:
|
||||
self.citation_details[-1].citations.append(citation_param)
|
||||
|
||||
def delete_empty(self) -> None:
|
||||
"""Delete empty citation details."""
|
||||
self.citation_details = [
|
||||
detail for detail in self.citation_details if detail.citations
|
||||
]
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
@@ -81,15 +167,31 @@ def _convert_content(
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
if content.tool_name == "web_search":
|
||||
tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam(
|
||||
type="web_search_tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=content.tool_result["content"]
|
||||
if "content" in content.tool_result
|
||||
else WebSearchToolRequestErrorParam(
|
||||
type="web_search_tool_result_error",
|
||||
error_code=content.tool_result.get("error_code", "unavailable"), # type: ignore[typeddict-item]
|
||||
),
|
||||
)
|
||||
external_tool = True
|
||||
else:
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
external_tool = False
|
||||
if not messages or messages[-1]["role"] != (
|
||||
"assistant" if external_tool else "user"
|
||||
):
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
role="assistant" if external_tool else "user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
@@ -151,13 +253,56 @@ def _convert_content(
|
||||
redacted_thinking_block
|
||||
)
|
||||
if content.content:
|
||||
current_index = 0
|
||||
for detail in (
|
||||
content.native.citation_details
|
||||
if isinstance(content.native, ContentDetails)
|
||||
else [CitationDetails(length=len(content.content))]
|
||||
):
|
||||
if detail.index > current_index:
|
||||
# Add text block for any text without citations
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[current_index : detail.index],
|
||||
)
|
||||
)
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[
|
||||
detail.index : detail.index + detail.length
|
||||
],
|
||||
citations=detail.citations,
|
||||
)
|
||||
if detail.citations
|
||||
else TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[
|
||||
detail.index : detail.index + detail.length
|
||||
],
|
||||
)
|
||||
)
|
||||
current_index = detail.index + detail.length
|
||||
if current_index < len(content.content):
|
||||
# Add text block for any remaining text without citations
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=content.content[current_index:],
|
||||
)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ToolUseBlockParam(
|
||||
ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
id=tool_call.id,
|
||||
name="web_search",
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
if tool_call.external and tool_call.tool_name == "web_search"
|
||||
else ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
@@ -173,10 +318,12 @@ def _convert_content(
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
stream: AsyncStream[MessageStreamEvent],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
) -> AsyncGenerator[
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
@@ -209,11 +356,13 @@ async def _transform_stream(
|
||||
if stream is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_tool_block: ToolUseBlockParam | None = None
|
||||
current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
|
||||
current_tool_args: str
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
input_usage: Usage | None = None
|
||||
has_content = False
|
||||
has_native = False
|
||||
first_block: bool
|
||||
|
||||
async for response in stream:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
@@ -222,6 +371,7 @@ async def _transform_stream(
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
input_usage = response.message.usage
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_tool_block = ToolUseBlockParam(
|
||||
@@ -232,17 +382,37 @@ async def _transform_stream(
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
if has_content:
|
||||
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
|
||||
first_block
|
||||
or (
|
||||
not content_details.has_citations()
|
||||
and response.content_block.citations is None
|
||||
and content_details.has_content()
|
||||
)
|
||||
):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
has_content = True
|
||||
first_block = False
|
||||
content_details.add_citation_detail()
|
||||
if response.content_block.text:
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.content_block.text
|
||||
)
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
if has_native:
|
||||
if first_block or has_native:
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
has_content = False
|
||||
first_block = False
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
@@ -250,15 +420,60 @@ async def _transform_stream(
|
||||
"responses"
|
||||
)
|
||||
if has_native:
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
has_content = False
|
||||
first_block = False
|
||||
yield {"native": response.content_block}
|
||||
has_native = True
|
||||
elif isinstance(response.content_block, ServerToolUseBlock):
|
||||
current_tool_block = ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, WebSearchToolResultBlock):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {
|
||||
"role": "tool_result",
|
||||
"tool_call_id": response.content_block.tool_use_id,
|
||||
"tool_name": "web_search",
|
||||
"tool_result": {
|
||||
"type": "web_search_tool_result_error",
|
||||
"error_code": response.content_block.content.error_code,
|
||||
}
|
||||
if isinstance(
|
||||
response.content_block.content, WebSearchToolResultError
|
||||
)
|
||||
else {
|
||||
"content": [
|
||||
{
|
||||
"type": "web_search_result",
|
||||
"encrypted_content": block.encrypted_content,
|
||||
"page_age": block.page_age,
|
||||
"title": block.title,
|
||||
"url": block.url,
|
||||
}
|
||||
for block in response.content_block.content
|
||||
]
|
||||
},
|
||||
}
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
content_details.citation_details[-1].length += len(response.delta.text)
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
yield {"thinking_content": response.delta.thinking}
|
||||
@@ -271,6 +486,8 @@ async def _transform_stream(
|
||||
)
|
||||
}
|
||||
has_native = True
|
||||
elif isinstance(response.delta, CitationsDelta):
|
||||
content_details.add_citation(response.delta.citation)
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_tool_block is not None:
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
@@ -281,6 +498,7 @@ async def _transform_stream(
|
||||
id=current_tool_block["id"],
|
||||
tool_name=current_tool_block["name"],
|
||||
tool_args=tool_args,
|
||||
external=current_tool_block["type"] == "server_tool_use",
|
||||
)
|
||||
]
|
||||
}
|
||||
@@ -290,6 +508,12 @@ async def _transform_stream(
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if content_details.has_citations():
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
@@ -337,21 +561,11 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
model_args = MessageCreateParamsStreaming(
|
||||
@@ -361,8 +575,8 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
system=system.content,
|
||||
stream=True,
|
||||
)
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
@@ -376,6 +590,34 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
tools: list[ToolUnionParam] = []
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
if options.get(CONF_WEB_SEARCH):
|
||||
web_search = WebSearchTool20250305Param(
|
||||
name="web_search",
|
||||
type="web_search_20250305",
|
||||
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||
)
|
||||
if options.get(CONF_WEB_SEARCH_USER_LOCATION):
|
||||
web_search["user_location"] = {
|
||||
"type": "approximate",
|
||||
"city": options.get(CONF_WEB_SEARCH_CITY, ""),
|
||||
"region": options.get(CONF_WEB_SEARCH_REGION, ""),
|
||||
"country": options.get(CONF_WEB_SEARCH_COUNTRY, ""),
|
||||
"timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""),
|
||||
}
|
||||
tools.append(web_search)
|
||||
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
|
@@ -35,11 +35,17 @@
|
||||
"temperature": "Temperature",
|
||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||
"recommended": "Recommended model settings",
|
||||
"thinking_budget_tokens": "Thinking budget"
|
||||
"thinking_budget": "Thinking budget",
|
||||
"web_search": "Enable web search",
|
||||
"web_search_max_uses": "Maximum web searches",
|
||||
"user_location": "Include home location"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
|
||||
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
|
||||
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
|
||||
"web_search_max_uses": "Limit the number of searches performed per response",
|
||||
"user_location": "Localize search results based on home location"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -48,7 +54,8 @@
|
||||
"entry_not_loaded": "Cannot add things while the configuration is disabled."
|
||||
},
|
||||
"error": {
|
||||
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
|
||||
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
|
||||
"web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,6 +7,8 @@ from typing import Any
|
||||
from pyaprilaire.const import Attribute
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
FAN_AUTO,
|
||||
FAN_ON,
|
||||
PRESET_AWAY,
|
||||
@@ -16,7 +18,12 @@ from homeassistant.components.climate import (
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
ATTR_TEMPERATURE,
|
||||
PRECISION_HALVES,
|
||||
PRECISION_WHOLE,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -232,15 +239,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
|
||||
cool_setpoint = 0
|
||||
heat_setpoint = 0
|
||||
|
||||
if temperature := kwargs.get("temperature"):
|
||||
if temperature := kwargs.get(ATTR_TEMPERATURE):
|
||||
if self.coordinator.data.get(Attribute.MODE) == 3:
|
||||
cool_setpoint = temperature
|
||||
else:
|
||||
heat_setpoint = temperature
|
||||
else:
|
||||
if target_temp_low := kwargs.get("target_temp_low"):
|
||||
if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW):
|
||||
heat_setpoint = target_temp_low
|
||||
if target_temp_high := kwargs.get("target_temp_high"):
|
||||
if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH):
|
||||
cool_setpoint = target_temp_high
|
||||
|
||||
if cool_setpoint == 0 and heat_setpoint == 0:
|
||||
|
@@ -7,13 +7,13 @@ from collections import namedtuple
|
||||
from collections.abc import Awaitable, Callable, Coroutine
|
||||
import functools
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from typing import Any
|
||||
|
||||
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
|
||||
from aiohttp import ClientSession
|
||||
from asusrouter import AsusRouter, AsusRouterError
|
||||
from asusrouter.config import ARConfigKey
|
||||
from asusrouter.modules.client import AsusClient
|
||||
from asusrouter.modules.client import AsusClient, ConnectionState
|
||||
from asusrouter.modules.data import AsusData
|
||||
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
|
||||
from asusrouter.tools.connection import get_cookie_jar
|
||||
@@ -219,7 +219,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
|
||||
@property
|
||||
def is_connected(self) -> bool:
|
||||
"""Get connected status."""
|
||||
return cast(bool, self._api.is_connected)
|
||||
return self._api.is_connected
|
||||
|
||||
async def async_connect(self) -> None:
|
||||
"""Connect to the device."""
|
||||
@@ -235,8 +235,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
|
||||
|
||||
async def async_disconnect(self) -> None:
|
||||
"""Disconnect to the device."""
|
||||
if self._api is not None and self._protocol == PROTOCOL_TELNET:
|
||||
self._api.connection.disconnect()
|
||||
await self._api.async_disconnect()
|
||||
|
||||
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
|
||||
"""Get list of connected devices."""
|
||||
@@ -437,6 +436,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
if dev.connection is not None
|
||||
and dev.description is not None
|
||||
and dev.connection.ip_address is not None
|
||||
and dev.state is ConnectionState.CONNECTED
|
||||
}
|
||||
|
||||
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
|
||||
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"]
|
||||
"requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
|
||||
}
|
||||
|
@@ -36,11 +36,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo
|
||||
raise ConfigEntryAuthFailed("Migration to OAuth required")
|
||||
|
||||
session = async_create_august_clientsession(hass)
|
||||
try:
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
except ValueError as err:
|
||||
raise ConfigEntryNotReady("OAuth implementation not available") from err
|
||||
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
|
||||
try:
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/autarco",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["autarco==3.1.0"]
|
||||
"requirements": ["autarco==3.2.0"]
|
||||
}
|
||||
|
@@ -57,6 +57,7 @@ from .api import (
|
||||
_get_manager,
|
||||
async_address_present,
|
||||
async_ble_device_from_address,
|
||||
async_clear_address_from_match_history,
|
||||
async_current_scanners,
|
||||
async_discovered_service_info,
|
||||
async_get_advertisement_callback,
|
||||
@@ -115,6 +116,7 @@ __all__ = [
|
||||
"HomeAssistantRemoteScanner",
|
||||
"async_address_present",
|
||||
"async_ble_device_from_address",
|
||||
"async_clear_address_from_match_history",
|
||||
"async_current_scanners",
|
||||
"async_discovered_service_info",
|
||||
"async_get_advertisement_callback",
|
||||
|
@@ -193,6 +193,20 @@ def async_rediscover_address(hass: HomeAssistant, address: str) -> None:
|
||||
_get_manager(hass).async_rediscover_address(address)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_clear_address_from_match_history(hass: HomeAssistant, address: str) -> None:
|
||||
"""Clear an address from the integration matcher history.
|
||||
|
||||
This allows future advertisements from this address to trigger discovery
|
||||
even if the advertisement content has changed but the service data UUIDs
|
||||
remain the same.
|
||||
|
||||
Unlike async_rediscover_address, this does not immediately re-trigger
|
||||
discovery with the current advertisement in history.
|
||||
"""
|
||||
_get_manager(hass).async_clear_address_from_match_history(address)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_register_scanner(
|
||||
hass: HomeAssistant,
|
||||
|
@@ -120,6 +120,19 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
if service_info := self._all_history.get(address):
|
||||
self._async_trigger_matching_discovery(service_info)
|
||||
|
||||
@hass_callback
|
||||
def async_clear_address_from_match_history(self, address: str) -> None:
|
||||
"""Clear an address from the integration matcher history.
|
||||
|
||||
This allows future advertisements from this address to trigger discovery
|
||||
even if the advertisement content has changed but the service data UUIDs
|
||||
remain the same.
|
||||
|
||||
Unlike async_rediscover_address, this does not immediately re-trigger
|
||||
discovery with the current advertisement in history.
|
||||
"""
|
||||
self._integration_matcher.async_clear_address(address)
|
||||
|
||||
def _discover_service_info(self, service_info: BluetoothServiceInfoBleak) -> None:
|
||||
matched_domains = self._integration_matcher.match_domains(service_info)
|
||||
if self._debug:
|
||||
|
@@ -68,12 +68,17 @@ class IntegrationMatchHistory:
|
||||
manufacturer_data: bool
|
||||
service_data: set[str]
|
||||
service_uuids: set[str]
|
||||
name: str
|
||||
|
||||
|
||||
def seen_all_fields(
|
||||
previous_match: IntegrationMatchHistory, advertisement_data: AdvertisementData
|
||||
previous_match: IntegrationMatchHistory,
|
||||
advertisement_data: AdvertisementData,
|
||||
name: str,
|
||||
) -> bool:
|
||||
"""Return if we have seen all fields."""
|
||||
if previous_match.name != name:
|
||||
return False
|
||||
if not previous_match.manufacturer_data and advertisement_data.manufacturer_data:
|
||||
return False
|
||||
if advertisement_data.service_data and (
|
||||
@@ -122,10 +127,11 @@ class IntegrationMatcher:
|
||||
device = service_info.device
|
||||
advertisement_data = service_info.advertisement
|
||||
connectable = service_info.connectable
|
||||
name = service_info.name
|
||||
matched = self._matched_connectable if connectable else self._matched
|
||||
matched_domains: set[str] = set()
|
||||
if (previous_match := matched.get(device.address)) and seen_all_fields(
|
||||
previous_match, advertisement_data
|
||||
previous_match, advertisement_data, name
|
||||
):
|
||||
# We have seen all fields so we can skip the rest of the matchers
|
||||
return matched_domains
|
||||
@@ -140,11 +146,13 @@ class IntegrationMatcher:
|
||||
)
|
||||
previous_match.service_data |= set(advertisement_data.service_data)
|
||||
previous_match.service_uuids |= set(advertisement_data.service_uuids)
|
||||
previous_match.name = name
|
||||
else:
|
||||
matched[device.address] = IntegrationMatchHistory(
|
||||
manufacturer_data=bool(advertisement_data.manufacturer_data),
|
||||
service_data=set(advertisement_data.service_data),
|
||||
service_uuids=set(advertisement_data.service_uuids),
|
||||
name=name,
|
||||
)
|
||||
return matched_domains
|
||||
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
|
||||
"requirements": ["brother==5.1.0"],
|
||||
"requirements": ["brother==5.1.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_printer._tcp.local.",
|
||||
|
@@ -7,12 +7,14 @@ from typing import Any
|
||||
from evolutionhttp import BryantEvolutionLocalClient
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -208,24 +210,24 @@ class BryantEvolutionClimate(ClimateEntity):
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
if kwargs.get("target_temp_high"):
|
||||
temp = int(kwargs["target_temp_high"])
|
||||
if value := kwargs.get(ATTR_TARGET_TEMP_HIGH):
|
||||
temp = int(value)
|
||||
if not await self._client.set_cooling_setpoint(temp):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="failed_to_set_clsp"
|
||||
)
|
||||
self._attr_target_temperature_high = temp
|
||||
|
||||
if kwargs.get("target_temp_low"):
|
||||
temp = int(kwargs["target_temp_low"])
|
||||
if value := kwargs.get(ATTR_TARGET_TEMP_LOW):
|
||||
temp = int(value)
|
||||
if not await self._client.set_heating_setpoint(temp):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="failed_to_set_htsp"
|
||||
)
|
||||
self._attr_target_temperature_low = temp
|
||||
|
||||
if kwargs.get("temperature"):
|
||||
temp = int(kwargs["temperature"])
|
||||
if value := kwargs.get(ATTR_TEMPERATURE):
|
||||
temp = int(value)
|
||||
fn = (
|
||||
self._client.set_heating_setpoint
|
||||
if self.hvac_mode == HVACMode.HEAT
|
||||
|
@@ -169,7 +169,7 @@ class CalendarEventListener:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
job: HassJob[..., Coroutine[Any, Any, None]],
|
||||
job: HassJob[..., Coroutine[Any, Any, None] | Any],
|
||||
trigger_data: dict[str, Any],
|
||||
fetcher: QueuedEventFetcher,
|
||||
) -> None:
|
||||
|
@@ -4,5 +4,6 @@
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/citybikes",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "legacy"
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["python-citybikes==0.3.3"]
|
||||
}
|
||||
|
@@ -5,8 +5,11 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import aiohttp
|
||||
from citybikes import __version__ as CITYBIKES_CLIENT_VERSION
|
||||
from citybikes.asyncio import Client as CitybikesClient
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -15,21 +18,18 @@ from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_ID,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LOCATION,
|
||||
ATTR_LONGITUDE,
|
||||
ATTR_NAME,
|
||||
APPLICATION_NAME,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_NAME,
|
||||
CONF_RADIUS,
|
||||
EVENT_HOMEASSISTANT_CLOSE,
|
||||
UnitOfLength,
|
||||
__version__,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
@@ -40,31 +40,33 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_EMPTY_SLOTS = "empty_slots"
|
||||
ATTR_EXTRA = "extra"
|
||||
ATTR_FREE_BIKES = "free_bikes"
|
||||
ATTR_NETWORK = "network"
|
||||
ATTR_NETWORKS_LIST = "networks"
|
||||
ATTR_STATIONS_LIST = "stations"
|
||||
ATTR_TIMESTAMP = "timestamp"
|
||||
HA_USER_AGENT = (
|
||||
f"{APPLICATION_NAME}/{__version__} "
|
||||
f"python-citybikes/{CITYBIKES_CLIENT_VERSION} "
|
||||
f"Python/{sys.version_info[0]}.{sys.version_info[1]}"
|
||||
)
|
||||
|
||||
ATTR_UID = "uid"
|
||||
ATTR_LATITUDE = "latitude"
|
||||
ATTR_LONGITUDE = "longitude"
|
||||
ATTR_EMPTY_SLOTS = "empty_slots"
|
||||
ATTR_TIMESTAMP = "timestamp"
|
||||
|
||||
CONF_NETWORK = "network"
|
||||
CONF_STATIONS_LIST = "stations"
|
||||
|
||||
DEFAULT_ENDPOINT = "https://api.citybik.es/{uri}"
|
||||
PLATFORM = "citybikes"
|
||||
|
||||
MONITORED_NETWORKS = "monitored-networks"
|
||||
|
||||
DATA_CLIENT = "client"
|
||||
|
||||
NETWORKS_URI = "v2/networks"
|
||||
|
||||
REQUEST_TIMEOUT = 5 # In seconds; argument to asyncio.timeout
|
||||
REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=5)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API
|
||||
|
||||
STATIONS_URI = "v2/networks/{uid}?fields=network.stations"
|
||||
|
||||
CITYBIKES_ATTRIBUTION = (
|
||||
"Information provided by the CityBikes Project (https://citybik.es/#about)"
|
||||
)
|
||||
@@ -87,72 +89,6 @@ PLATFORM_SCHEMA = vol.All(
|
||||
),
|
||||
)
|
||||
|
||||
NETWORK_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ID): cv.string,
|
||||
vol.Required(ATTR_NAME): cv.string,
|
||||
vol.Required(ATTR_LOCATION): vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_LATITUDE): cv.latitude,
|
||||
vol.Required(ATTR_LONGITUDE): cv.longitude,
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
NETWORKS_RESPONSE_SCHEMA = vol.Schema(
|
||||
{vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA]}
|
||||
)
|
||||
|
||||
STATION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_FREE_BIKES): cv.positive_int,
|
||||
vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None),
|
||||
vol.Required(ATTR_LATITUDE): cv.latitude,
|
||||
vol.Required(ATTR_LONGITUDE): cv.longitude,
|
||||
vol.Required(ATTR_ID): cv.string,
|
||||
vol.Required(ATTR_NAME): cv.string,
|
||||
vol.Required(ATTR_TIMESTAMP): cv.string,
|
||||
vol.Optional(ATTR_EXTRA): vol.Schema(
|
||||
{vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA
|
||||
),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
STATIONS_RESPONSE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_NETWORK): vol.Schema(
|
||||
{vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]}, extra=vol.REMOVE_EXTRA
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class CityBikesRequestError(Exception):
|
||||
"""Error to indicate a CityBikes API request has failed."""
|
||||
|
||||
|
||||
async def async_citybikes_request(hass, uri, schema):
|
||||
"""Perform a request to CityBikes API endpoint, and parse the response."""
|
||||
try:
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
async with asyncio.timeout(REQUEST_TIMEOUT):
|
||||
req = await session.get(DEFAULT_ENDPOINT.format(uri=uri))
|
||||
|
||||
json_response = await req.json()
|
||||
return schema(json_response)
|
||||
except (TimeoutError, aiohttp.ClientError):
|
||||
_LOGGER.error("Could not connect to CityBikes API endpoint")
|
||||
except ValueError:
|
||||
_LOGGER.error("Received non-JSON data from CityBikes API endpoint")
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Received unexpected JSON from CityBikes API endpoint: %s", err)
|
||||
raise CityBikesRequestError
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
@@ -175,6 +111,14 @@ async def async_setup_platform(
|
||||
radius, UnitOfLength.FEET, UnitOfLength.METERS
|
||||
)
|
||||
|
||||
client = CitybikesClient(user_agent=HA_USER_AGENT, timeout=REQUEST_TIMEOUT)
|
||||
hass.data[PLATFORM][DATA_CLIENT] = client
|
||||
|
||||
async def _async_close_client(event):
|
||||
await client.close()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_client)
|
||||
|
||||
# Create a single instance of CityBikesNetworks.
|
||||
networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass))
|
||||
|
||||
@@ -194,10 +138,10 @@ async def async_setup_platform(
|
||||
devices = []
|
||||
for station in network.stations:
|
||||
dist = location_util.distance(
|
||||
latitude, longitude, station[ATTR_LATITUDE], station[ATTR_LONGITUDE]
|
||||
latitude, longitude, station.latitude, station.longitude
|
||||
)
|
||||
station_id = station[ATTR_ID]
|
||||
station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, ""))
|
||||
station_id = station.id
|
||||
station_uid = str(station.extra.get(ATTR_UID, ""))
|
||||
|
||||
if radius > dist or stations_list.intersection((station_id, station_uid)):
|
||||
if name:
|
||||
@@ -216,6 +160,7 @@ class CityBikesNetworks:
|
||||
def __init__(self, hass):
|
||||
"""Initialize the networks instance."""
|
||||
self.hass = hass
|
||||
self.client = hass.data[PLATFORM][DATA_CLIENT]
|
||||
self.networks = None
|
||||
self.networks_loading = asyncio.Condition()
|
||||
|
||||
@@ -224,24 +169,21 @@ class CityBikesNetworks:
|
||||
try:
|
||||
await self.networks_loading.acquire()
|
||||
if self.networks is None:
|
||||
networks = await async_citybikes_request(
|
||||
self.hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA
|
||||
)
|
||||
self.networks = networks[ATTR_NETWORKS_LIST]
|
||||
except CityBikesRequestError as err:
|
||||
self.networks = await self.client.networks.fetch()
|
||||
except aiohttp.ClientError as err:
|
||||
raise PlatformNotReady from err
|
||||
else:
|
||||
result = None
|
||||
minimum_dist = None
|
||||
for network in self.networks:
|
||||
network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE]
|
||||
network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE]
|
||||
network_latitude = network.location.latitude
|
||||
network_longitude = network.location.longitude
|
||||
dist = location_util.distance(
|
||||
latitude, longitude, network_latitude, network_longitude
|
||||
)
|
||||
if minimum_dist is None or dist < minimum_dist:
|
||||
minimum_dist = dist
|
||||
result = network[ATTR_ID]
|
||||
result = network.id
|
||||
|
||||
return result
|
||||
finally:
|
||||
@@ -257,22 +199,20 @@ class CityBikesNetwork:
|
||||
self.network_id = network_id
|
||||
self.stations = []
|
||||
self.ready = asyncio.Event()
|
||||
self.client = hass.data[PLATFORM][DATA_CLIENT]
|
||||
|
||||
async def async_refresh(self, now=None):
|
||||
"""Refresh the state of the network."""
|
||||
try:
|
||||
network = await async_citybikes_request(
|
||||
self.hass,
|
||||
STATIONS_URI.format(uid=self.network_id),
|
||||
STATIONS_RESPONSE_SCHEMA,
|
||||
)
|
||||
self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST]
|
||||
self.ready.set()
|
||||
except CityBikesRequestError as err:
|
||||
if now is not None:
|
||||
self.ready.clear()
|
||||
else:
|
||||
network = await self.client.network(uid=self.network_id).fetch()
|
||||
except aiohttp.ClientError as err:
|
||||
if now is None:
|
||||
raise PlatformNotReady from err
|
||||
self.ready.clear()
|
||||
return
|
||||
|
||||
self.stations = network.stations
|
||||
self.ready.set()
|
||||
|
||||
|
||||
class CityBikesStation(SensorEntity):
|
||||
@@ -290,16 +230,13 @@ class CityBikesStation(SensorEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update station state."""
|
||||
for station in self._network.stations:
|
||||
if station[ATTR_ID] == self._station_id:
|
||||
station_data = station
|
||||
break
|
||||
self._attr_name = station_data.get(ATTR_NAME)
|
||||
self._attr_native_value = station_data.get(ATTR_FREE_BIKES)
|
||||
station = next(s for s in self._network.stations if s.id == self._station_id)
|
||||
self._attr_name = station.name
|
||||
self._attr_native_value = station.free_bikes
|
||||
self._attr_extra_state_attributes = {
|
||||
ATTR_UID: station_data.get(ATTR_EXTRA, {}).get(ATTR_UID),
|
||||
ATTR_LATITUDE: station_data.get(ATTR_LATITUDE),
|
||||
ATTR_LONGITUDE: station_data.get(ATTR_LONGITUDE),
|
||||
ATTR_EMPTY_SLOTS: station_data.get(ATTR_EMPTY_SLOTS),
|
||||
ATTR_TIMESTAMP: station_data.get(ATTR_TIMESTAMP),
|
||||
ATTR_UID: station.extra.get(ATTR_UID),
|
||||
ATTR_LATITUDE: station.latitude,
|
||||
ATTR_LONGITUDE: station.longitude,
|
||||
ATTR_EMPTY_SLOTS: station.empty_slots,
|
||||
ATTR_TIMESTAMP: station.timestamp,
|
||||
}
|
||||
|
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.2.0"],
|
||||
"requirements": ["hass-nabucasa==1.3.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -7,14 +7,7 @@ from typing import Any, cast
|
||||
from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
STATE_CLOSED,
|
||||
STATE_CLOSING,
|
||||
STATE_OPEN,
|
||||
STATE_OPENING,
|
||||
CoverDeviceClass,
|
||||
CoverEntity,
|
||||
)
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
@@ -128,9 +121,9 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
await super().async_added_to_hass()
|
||||
|
||||
if (state := await self.async_get_last_state()) is not None:
|
||||
if state.state == STATE_CLOSED:
|
||||
self._last_action = STATE_COVER.index(STATE_CLOSING)
|
||||
if state.state == STATE_OPEN:
|
||||
self._last_action = STATE_COVER.index(STATE_OPENING)
|
||||
if state.state == CoverState.CLOSED:
|
||||
self._last_action = STATE_COVER.index(CoverState.CLOSING)
|
||||
if state.state == CoverState.OPEN:
|
||||
self._last_action = STATE_COVER.index(CoverState.OPENING)
|
||||
|
||||
self._attr_is_closed = state.state == STATE_CLOSED
|
||||
self._attr_is_closed = state.state == CoverState.CLOSED
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiocomelit==1.1.1"]
|
||||
"requirements": ["aiocomelit==1.1.2"]
|
||||
}
|
||||
|
@@ -138,7 +138,7 @@ def new_device_listener(
|
||||
data_type: str,
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to coordinator updates to check for new devices."""
|
||||
known_devices: set[int] = set()
|
||||
known_devices: dict[str, list[int]] = {}
|
||||
|
||||
def _check_devices() -> None:
|
||||
"""Check for new devices and call callback with any new monitors."""
|
||||
@@ -147,8 +147,8 @@ def new_device_listener(
|
||||
|
||||
new_devices: list[DeviceType] = []
|
||||
for _id in coordinator.data[data_type]:
|
||||
if _id not in known_devices:
|
||||
known_devices.add(_id)
|
||||
if _id not in (id_list := known_devices.get(data_type, [])):
|
||||
known_devices.update({data_type: [*id_list, _id]})
|
||||
new_devices.append(coordinator.data[data_type][_id])
|
||||
|
||||
if new_devices:
|
||||
|
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/control4",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyControl4"],
|
||||
"requirements": ["pyControl4==1.2.0"],
|
||||
"requirements": ["pyControl4==1.5.0"],
|
||||
"ssdp": [
|
||||
{
|
||||
"st": "c4:director"
|
||||
|
@@ -514,7 +514,7 @@ class ChatLog:
|
||||
"""Set the LLM system prompt."""
|
||||
llm_api: llm.APIInstance | None = None
|
||||
|
||||
if user_llm_hass_api is None:
|
||||
if not user_llm_hass_api:
|
||||
pass
|
||||
elif isinstance(user_llm_hass_api, llm.API):
|
||||
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
|
||||
|
@@ -38,9 +38,11 @@ from home_assistant_intents import (
|
||||
ErrorKey,
|
||||
FuzzyConfig,
|
||||
FuzzyLanguageResponses,
|
||||
LanguageScores,
|
||||
get_fuzzy_config,
|
||||
get_fuzzy_language,
|
||||
get_intents,
|
||||
get_language_scores,
|
||||
get_languages,
|
||||
)
|
||||
import yaml
|
||||
@@ -59,6 +61,7 @@ from homeassistant.core import (
|
||||
)
|
||||
from homeassistant.helpers import (
|
||||
area_registry as ar,
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
floor_registry as fr,
|
||||
@@ -343,6 +346,81 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
return result
|
||||
|
||||
async def async_debug_recognize(
|
||||
self, user_input: ConversationInput
|
||||
) -> dict[str, Any] | None:
|
||||
"""Debug recognize from user input."""
|
||||
result_dict: dict[str, Any] | None = None
|
||||
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
result_dict = {
|
||||
# Matched a user-defined sentence trigger.
|
||||
# We can't provide the response here without executing the
|
||||
# trigger.
|
||||
"match": True,
|
||||
"source": "trigger",
|
||||
"sentence_template": trigger_result.sentence_template or "",
|
||||
}
|
||||
elif intent_result := await self.async_recognize_intent(user_input):
|
||||
successful_match = not intent_result.unmatched_entities
|
||||
result_dict = {
|
||||
# Name of the matching intent (or the closest)
|
||||
"intent": {
|
||||
"name": intent_result.intent.name,
|
||||
},
|
||||
# Slot values that would be received by the intent
|
||||
"slots": { # direct access to values
|
||||
entity_key: entity.text or entity.value
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Extra slot details, such as the originally matched text
|
||||
"details": {
|
||||
entity_key: {
|
||||
"name": entity.name,
|
||||
"value": entity.value,
|
||||
"text": entity.text,
|
||||
}
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Entities/areas/etc. that would be targeted
|
||||
"targets": {},
|
||||
# True if match was successful
|
||||
"match": successful_match,
|
||||
# Text of the sentence template that matched (or was closest)
|
||||
"sentence_template": "",
|
||||
# When match is incomplete, this will contain the best slot guesses
|
||||
"unmatched_slots": _get_unmatched_slots(intent_result),
|
||||
# True if match was not exact
|
||||
"fuzzy_match": False,
|
||||
}
|
||||
|
||||
if successful_match:
|
||||
result_dict["targets"] = {
|
||||
state.entity_id: {"matched": is_matched}
|
||||
for state, is_matched in _get_debug_targets(
|
||||
self.hass, intent_result
|
||||
)
|
||||
}
|
||||
|
||||
if intent_result.intent_sentence is not None:
|
||||
result_dict["sentence_template"] = intent_result.intent_sentence.text
|
||||
|
||||
if intent_result.intent_metadata:
|
||||
# Inspect metadata to determine if this matched a custom sentence
|
||||
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
|
||||
result_dict["source"] = "custom"
|
||||
result_dict["file"] = intent_result.intent_metadata.get(
|
||||
METADATA_CUSTOM_FILE
|
||||
)
|
||||
else:
|
||||
result_dict["source"] = "builtin"
|
||||
|
||||
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
|
||||
METADATA_FUZZY_MATCH, False
|
||||
)
|
||||
|
||||
return result_dict
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
@@ -1529,6 +1607,10 @@ class DefaultAgent(ConversationEntity):
|
||||
return None
|
||||
return response
|
||||
|
||||
async def async_get_language_scores(self) -> dict[str, LanguageScores]:
|
||||
"""Get support scores per language."""
|
||||
return await self.hass.async_add_executor_job(get_language_scores)
|
||||
|
||||
|
||||
def _make_error_result(
|
||||
language: str,
|
||||
@@ -1725,3 +1807,75 @@ def _collect_list_references(expression: Expression, list_names: set[str]) -> No
|
||||
elif isinstance(expression, ListReference):
|
||||
# {list}
|
||||
list_names.add(expression.slot_name)
|
||||
|
||||
|
||||
def _get_debug_targets(
|
||||
hass: HomeAssistant,
|
||||
result: RecognizeResult,
|
||||
) -> Iterable[tuple[State, bool]]:
|
||||
"""Yield state/is_matched pairs for a hassil recognition."""
|
||||
entities = result.entities
|
||||
|
||||
name: str | None = None
|
||||
area_name: str | None = None
|
||||
domains: set[str] | None = None
|
||||
device_classes: set[str] | None = None
|
||||
state_names: set[str] | None = None
|
||||
|
||||
if "name" in entities:
|
||||
name = str(entities["name"].value)
|
||||
|
||||
if "area" in entities:
|
||||
area_name = str(entities["area"].value)
|
||||
|
||||
if "domain" in entities:
|
||||
domains = set(cv.ensure_list(entities["domain"].value))
|
||||
|
||||
if "device_class" in entities:
|
||||
device_classes = set(cv.ensure_list(entities["device_class"].value))
|
||||
|
||||
if "state" in entities:
|
||||
# HassGetState only
|
||||
state_names = set(cv.ensure_list(entities["state"].value))
|
||||
|
||||
if (
|
||||
(name is None)
|
||||
and (area_name is None)
|
||||
and (not domains)
|
||||
and (not device_classes)
|
||||
and (not state_names)
|
||||
):
|
||||
# Avoid "matching" all entities when there is no filter
|
||||
return
|
||||
|
||||
states = intent.async_match_states(
|
||||
hass,
|
||||
name=name,
|
||||
area_name=area_name,
|
||||
domains=domains,
|
||||
device_classes=device_classes,
|
||||
)
|
||||
|
||||
for state in states:
|
||||
# For queries, a target is "matched" based on its state
|
||||
is_matched = (state_names is None) or (state.state in state_names)
|
||||
yield state, is_matched
|
||||
|
||||
|
||||
def _get_unmatched_slots(
|
||||
result: RecognizeResult,
|
||||
) -> dict[str, str | int | float]:
|
||||
"""Return a dict of unmatched text/range slot entities."""
|
||||
unmatched_slots: dict[str, str | int | float] = {}
|
||||
for entity in result.unmatched_entities_list:
|
||||
if isinstance(entity, UnmatchedTextEntity):
|
||||
if entity.text == MISSING_ENTITY:
|
||||
# Don't report <missing> since these are just missing context
|
||||
# slots.
|
||||
continue
|
||||
|
||||
unmatched_slots[entity.name] = entity.text
|
||||
elif isinstance(entity, UnmatchedRangeEntity):
|
||||
unmatched_slots[entity.name] = entity.value
|
||||
|
||||
return unmatched_slots
|
||||
|
@@ -2,21 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
from hassil.recognize import MISSING_ENTITY, RecognizeResult
|
||||
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
|
||||
from home_assistant_intents import get_language_scores
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.helpers import config_validation as cv, intent
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.util import language as language_util
|
||||
|
||||
from .agent_manager import (
|
||||
@@ -26,11 +21,6 @@ from .agent_manager import (
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .default_agent import (
|
||||
METADATA_CUSTOM_FILE,
|
||||
METADATA_CUSTOM_SENTENCE,
|
||||
METADATA_FUZZY_MATCH,
|
||||
)
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput
|
||||
|
||||
@@ -206,150 +196,12 @@ async def websocket_hass_agent_debug(
|
||||
language=msg.get("language", hass.config.language),
|
||||
agent_id=agent.entity_id,
|
||||
)
|
||||
result_dict: dict[str, Any] | None = None
|
||||
|
||||
if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
|
||||
result_dict = {
|
||||
# Matched a user-defined sentence trigger.
|
||||
# We can't provide the response here without executing the
|
||||
# trigger.
|
||||
"match": True,
|
||||
"source": "trigger",
|
||||
"sentence_template": trigger_result.sentence_template or "",
|
||||
}
|
||||
elif intent_result := await agent.async_recognize_intent(user_input):
|
||||
successful_match = not intent_result.unmatched_entities
|
||||
result_dict = {
|
||||
# Name of the matching intent (or the closest)
|
||||
"intent": {
|
||||
"name": intent_result.intent.name,
|
||||
},
|
||||
# Slot values that would be received by the intent
|
||||
"slots": { # direct access to values
|
||||
entity_key: entity.text or entity.value
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Extra slot details, such as the originally matched text
|
||||
"details": {
|
||||
entity_key: {
|
||||
"name": entity.name,
|
||||
"value": entity.value,
|
||||
"text": entity.text,
|
||||
}
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Entities/areas/etc. that would be targeted
|
||||
"targets": {},
|
||||
# True if match was successful
|
||||
"match": successful_match,
|
||||
# Text of the sentence template that matched (or was closest)
|
||||
"sentence_template": "",
|
||||
# When match is incomplete, this will contain the best slot guesses
|
||||
"unmatched_slots": _get_unmatched_slots(intent_result),
|
||||
# True if match was not exact
|
||||
"fuzzy_match": False,
|
||||
}
|
||||
|
||||
if successful_match:
|
||||
result_dict["targets"] = {
|
||||
state.entity_id: {"matched": is_matched}
|
||||
for state, is_matched in _get_debug_targets(hass, intent_result)
|
||||
}
|
||||
|
||||
if intent_result.intent_sentence is not None:
|
||||
result_dict["sentence_template"] = intent_result.intent_sentence.text
|
||||
|
||||
if intent_result.intent_metadata:
|
||||
# Inspect metadata to determine if this matched a custom sentence
|
||||
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
|
||||
result_dict["source"] = "custom"
|
||||
result_dict["file"] = intent_result.intent_metadata.get(
|
||||
METADATA_CUSTOM_FILE
|
||||
)
|
||||
else:
|
||||
result_dict["source"] = "builtin"
|
||||
|
||||
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
|
||||
METADATA_FUZZY_MATCH, False
|
||||
)
|
||||
|
||||
result_dict = await agent.async_debug_recognize(user_input)
|
||||
result_dicts.append(result_dict)
|
||||
|
||||
connection.send_result(msg["id"], {"results": result_dicts})
|
||||
|
||||
|
||||
def _get_debug_targets(
|
||||
hass: HomeAssistant,
|
||||
result: RecognizeResult,
|
||||
) -> Iterable[tuple[State, bool]]:
|
||||
"""Yield state/is_matched pairs for a hassil recognition."""
|
||||
entities = result.entities
|
||||
|
||||
name: str | None = None
|
||||
area_name: str | None = None
|
||||
domains: set[str] | None = None
|
||||
device_classes: set[str] | None = None
|
||||
state_names: set[str] | None = None
|
||||
|
||||
if "name" in entities:
|
||||
name = str(entities["name"].value)
|
||||
|
||||
if "area" in entities:
|
||||
area_name = str(entities["area"].value)
|
||||
|
||||
if "domain" in entities:
|
||||
domains = set(cv.ensure_list(entities["domain"].value))
|
||||
|
||||
if "device_class" in entities:
|
||||
device_classes = set(cv.ensure_list(entities["device_class"].value))
|
||||
|
||||
if "state" in entities:
|
||||
# HassGetState only
|
||||
state_names = set(cv.ensure_list(entities["state"].value))
|
||||
|
||||
if (
|
||||
(name is None)
|
||||
and (area_name is None)
|
||||
and (not domains)
|
||||
and (not device_classes)
|
||||
and (not state_names)
|
||||
):
|
||||
# Avoid "matching" all entities when there is no filter
|
||||
return
|
||||
|
||||
states = intent.async_match_states(
|
||||
hass,
|
||||
name=name,
|
||||
area_name=area_name,
|
||||
domains=domains,
|
||||
device_classes=device_classes,
|
||||
)
|
||||
|
||||
for state in states:
|
||||
# For queries, a target is "matched" based on its state
|
||||
is_matched = (state_names is None) or (state.state in state_names)
|
||||
yield state, is_matched
|
||||
|
||||
|
||||
def _get_unmatched_slots(
|
||||
result: RecognizeResult,
|
||||
) -> dict[str, str | int | float]:
|
||||
"""Return a dict of unmatched text/range slot entities."""
|
||||
unmatched_slots: dict[str, str | int | float] = {}
|
||||
for entity in result.unmatched_entities_list:
|
||||
if isinstance(entity, UnmatchedTextEntity):
|
||||
if entity.text == MISSING_ENTITY:
|
||||
# Don't report <missing> since these are just missing context
|
||||
# slots.
|
||||
continue
|
||||
|
||||
unmatched_slots[entity.name] = entity.text
|
||||
elif isinstance(entity, UnmatchedRangeEntity):
|
||||
unmatched_slots[entity.name] = entity.value
|
||||
|
||||
return unmatched_slots
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/agent/homeassistant/language_scores",
|
||||
@@ -364,10 +216,13 @@ async def websocket_hass_agent_language_scores(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get support scores per language."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
|
||||
language = msg.get("language", hass.config.language)
|
||||
country = msg.get("country", hass.config.country)
|
||||
|
||||
scores = await hass.async_add_executor_job(get_language_scores)
|
||||
scores = await agent.async_get_language_scores()
|
||||
matching_langs = language_util.matches(language, scores.keys(), country=country)
|
||||
preferred_lang = matching_langs[0] if matching_langs else language
|
||||
result = {
|
||||
|
@@ -13,7 +13,7 @@ from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ( # noqa: F401
|
||||
from homeassistant.const import (
|
||||
SERVICE_CLOSE_COVER,
|
||||
SERVICE_CLOSE_COVER_TILT,
|
||||
SERVICE_OPEN_COVER,
|
||||
@@ -24,19 +24,9 @@ from homeassistant.const import ( # noqa: F401
|
||||
SERVICE_STOP_COVER_TILT,
|
||||
SERVICE_TOGGLE,
|
||||
SERVICE_TOGGLE_COVER_TILT,
|
||||
STATE_CLOSED,
|
||||
STATE_CLOSING,
|
||||
STATE_OPEN,
|
||||
STATE_OPENING,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.deprecation import (
|
||||
DeprecatedConstantEnum,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -63,15 +53,6 @@ class CoverState(StrEnum):
|
||||
OPENING = "opening"
|
||||
|
||||
|
||||
# STATE_* below are deprecated as of 2024.11
|
||||
# when imported from homeassistant.components.cover
|
||||
# use the CoverState enum instead.
|
||||
_DEPRECATED_STATE_CLOSED = DeprecatedConstantEnum(CoverState.CLOSED, "2025.11")
|
||||
_DEPRECATED_STATE_CLOSING = DeprecatedConstantEnum(CoverState.CLOSING, "2025.11")
|
||||
_DEPRECATED_STATE_OPEN = DeprecatedConstantEnum(CoverState.OPEN, "2025.11")
|
||||
_DEPRECATED_STATE_OPENING = DeprecatedConstantEnum(CoverState.OPENING, "2025.11")
|
||||
|
||||
|
||||
class CoverDeviceClass(StrEnum):
|
||||
"""Device class for cover."""
|
||||
|
||||
@@ -463,11 +444,3 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return (
|
||||
fns["close"] if self._cover_is_last_toggle_direction_open else fns["open"]
|
||||
)
|
||||
|
||||
|
||||
# These can be removed if no deprecated constant are in this module anymore
|
||||
__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals())
|
||||
__dir__ = ft.partial(
|
||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
||||
)
|
||||
__all__ = all_with_deprecated_constants(globals())
|
||||
|
@@ -24,6 +24,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import EnergyConverter
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -146,6 +147,7 @@ class DukeEnergyCoordinator(DataUpdateCoordinator[None]):
|
||||
name=f"{name_prefix} Consumption",
|
||||
source=DOMAIN,
|
||||
statistic_id=consumption_statistic_id,
|
||||
unit_class=EnergyConverter.UNIT_CLASS,
|
||||
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR
|
||||
if meter["serviceType"] == "ELECTRIC"
|
||||
else UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]
|
||||
}
|
||||
|
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from elevenlabs import AsyncElevenLabs, Model
|
||||
from elevenlabs.core import ApiError
|
||||
@@ -18,9 +19,14 @@ from homeassistant.exceptions import (
|
||||
)
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_MODEL
|
||||
from .const import CONF_MODEL, CONF_STT_MODEL
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.TTS]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.STT,
|
||||
Platform.TTS,
|
||||
]
|
||||
|
||||
|
||||
async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None:
|
||||
@@ -39,6 +45,7 @@ class ElevenLabsData:
|
||||
|
||||
client: AsyncElevenLabs
|
||||
model: Model
|
||||
stt_model: str
|
||||
|
||||
|
||||
type ElevenLabsConfigEntry = ConfigEntry[ElevenLabsData]
|
||||
@@ -62,7 +69,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
|
||||
if model is None or (not model.languages):
|
||||
raise ConfigEntryError("Model could not be resolved")
|
||||
|
||||
entry.runtime_data = ElevenLabsData(client=client, model=model)
|
||||
entry.runtime_data = ElevenLabsData(
|
||||
client=client, model=model, stt_model=entry.options[CONF_STT_MODEL]
|
||||
)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
@@ -78,3 +87,44 @@ async def update_listener(
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: ElevenLabsConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate old config entry to new format."""
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migrating configuration from version %s.%s",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
if config_entry.version > 1:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if config_entry.version == 1:
|
||||
new_options = {**config_entry.options}
|
||||
|
||||
if config_entry.minor_version < 2:
|
||||
# Add defaults only if they’re not already present
|
||||
if "stt_auto_language" not in new_options:
|
||||
new_options["stt_auto_language"] = False
|
||||
if "stt_model" not in new_options:
|
||||
new_options["stt_model"] = "scribe_v1"
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
options=new_options,
|
||||
minor_version=2,
|
||||
version=1,
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to configuration version %s.%s successful",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
return True # already up to date
|
||||
|
@@ -25,15 +25,20 @@ from .const import (
|
||||
CONF_MODEL,
|
||||
CONF_SIMILARITY,
|
||||
CONF_STABILITY,
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
CONF_STT_MODEL,
|
||||
CONF_STYLE,
|
||||
CONF_USE_SPEAKER_BOOST,
|
||||
CONF_VOICE,
|
||||
DEFAULT_MODEL,
|
||||
DEFAULT_SIMILARITY,
|
||||
DEFAULT_STABILITY,
|
||||
DEFAULT_STT_AUTO_LANGUAGE,
|
||||
DEFAULT_STT_MODEL,
|
||||
DEFAULT_STYLE,
|
||||
DEFAULT_TTS_MODEL,
|
||||
DEFAULT_USE_SPEAKER_BOOST,
|
||||
DOMAIN,
|
||||
STT_MODELS,
|
||||
)
|
||||
|
||||
USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str})
|
||||
@@ -68,6 +73,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for ElevenLabs text-to-speech."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -88,7 +94,12 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(
|
||||
title="ElevenLabs",
|
||||
data=user_input,
|
||||
options={CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: list(voices)[0]},
|
||||
options={
|
||||
CONF_MODEL: DEFAULT_TTS_MODEL,
|
||||
CONF_VOICE: list(voices)[0],
|
||||
CONF_STT_MODEL: DEFAULT_STT_MODEL,
|
||||
CONF_STT_AUTO_LANGUAGE: False,
|
||||
},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=USER_STEP_SCHEMA, errors=errors
|
||||
@@ -113,6 +124,9 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
self.models: dict[str, str] = {}
|
||||
self.model: str | None = None
|
||||
self.voice: str | None = None
|
||||
self.stt_models: dict[str, str] = STT_MODELS
|
||||
self.stt_model: str | None = None
|
||||
self.auto_language: bool | None = None
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -126,6 +140,8 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
if user_input is not None:
|
||||
self.model = user_input[CONF_MODEL]
|
||||
self.voice = user_input[CONF_VOICE]
|
||||
self.stt_model = user_input[CONF_STT_MODEL]
|
||||
self.auto_language = user_input[CONF_STT_AUTO_LANGUAGE]
|
||||
configure_voice = user_input.pop(CONF_CONFIGURE_VOICE)
|
||||
if configure_voice:
|
||||
return await self.async_step_voice_settings()
|
||||
@@ -165,6 +181,22 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
]
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_STT_MODEL,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(label=model_name, value=model_id)
|
||||
for model_id, model_name in self.stt_models.items()
|
||||
]
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_STT_AUTO_LANGUAGE, DEFAULT_STT_AUTO_LANGUAGE
|
||||
),
|
||||
): bool,
|
||||
vol.Required(CONF_CONFIGURE_VOICE, default=False): bool,
|
||||
}
|
||||
),
|
||||
@@ -179,6 +211,8 @@ class ElevenLabsOptionsFlow(OptionsFlow):
|
||||
if user_input is not None:
|
||||
user_input[CONF_MODEL] = self.model
|
||||
user_input[CONF_VOICE] = self.voice
|
||||
user_input[CONF_STT_MODEL] = self.stt_model
|
||||
user_input[CONF_STT_AUTO_LANGUAGE] = self.auto_language
|
||||
return self.async_create_entry(
|
||||
title="ElevenLabs",
|
||||
data=user_input,
|
||||
|
@@ -7,12 +7,123 @@ CONF_MODEL = "model"
|
||||
CONF_CONFIGURE_VOICE = "configure_voice"
|
||||
CONF_STABILITY = "stability"
|
||||
CONF_SIMILARITY = "similarity"
|
||||
CONF_STT_AUTO_LANGUAGE = "stt_auto_language"
|
||||
CONF_STT_MODEL = "stt_model"
|
||||
CONF_STYLE = "style"
|
||||
CONF_USE_SPEAKER_BOOST = "use_speaker_boost"
|
||||
DOMAIN = "elevenlabs"
|
||||
|
||||
DEFAULT_MODEL = "eleven_multilingual_v2"
|
||||
DEFAULT_TTS_MODEL = "eleven_multilingual_v2"
|
||||
DEFAULT_STABILITY = 0.5
|
||||
DEFAULT_SIMILARITY = 0.75
|
||||
DEFAULT_STT_AUTO_LANGUAGE = False
|
||||
DEFAULT_STT_MODEL = "scribe_v1"
|
||||
DEFAULT_STYLE = 0
|
||||
DEFAULT_USE_SPEAKER_BOOST = True
|
||||
|
||||
STT_LANGUAGES = [
|
||||
"af-ZA", # Afrikaans
|
||||
"am-ET", # Amharic
|
||||
"ar-SA", # Arabic
|
||||
"hy-AM", # Armenian
|
||||
"as-IN", # Assamese
|
||||
"ast-ES", # Asturian
|
||||
"az-AZ", # Azerbaijani
|
||||
"be-BY", # Belarusian
|
||||
"bn-IN", # Bengali
|
||||
"bs-BA", # Bosnian
|
||||
"bg-BG", # Bulgarian
|
||||
"my-MM", # Burmese
|
||||
"yue-HK", # Cantonese
|
||||
"ca-ES", # Catalan
|
||||
"ceb-PH", # Cebuano
|
||||
"ny-MW", # Chichewa
|
||||
"hr-HR", # Croatian
|
||||
"cs-CZ", # Czech
|
||||
"da-DK", # Danish
|
||||
"nl-NL", # Dutch
|
||||
"en-US", # English
|
||||
"et-EE", # Estonian
|
||||
"fil-PH", # Filipino
|
||||
"fi-FI", # Finnish
|
||||
"fr-FR", # French
|
||||
"ff-SN", # Fulah
|
||||
"gl-ES", # Galician
|
||||
"lg-UG", # Ganda
|
||||
"ka-GE", # Georgian
|
||||
"de-DE", # German
|
||||
"el-GR", # Greek
|
||||
"gu-IN", # Gujarati
|
||||
"ha-NG", # Hausa
|
||||
"he-IL", # Hebrew
|
||||
"hi-IN", # Hindi
|
||||
"hu-HU", # Hungarian
|
||||
"is-IS", # Icelandic
|
||||
"ig-NG", # Igbo
|
||||
"id-ID", # Indonesian
|
||||
"ga-IE", # Irish
|
||||
"it-IT", # Italian
|
||||
"ja-JP", # Japanese
|
||||
"jv-ID", # Javanese
|
||||
"kea-CV", # Kabuverdianu
|
||||
"kn-IN", # Kannada
|
||||
"kk-KZ", # Kazakh
|
||||
"km-KH", # Khmer
|
||||
"ko-KR", # Korean
|
||||
"ku-TR", # Kurdish
|
||||
"ky-KG", # Kyrgyz
|
||||
"lo-LA", # Lao
|
||||
"lv-LV", # Latvian
|
||||
"ln-CD", # Lingala
|
||||
"lt-LT", # Lithuanian
|
||||
"luo-KE", # Luo
|
||||
"lb-LU", # Luxembourgish
|
||||
"mk-MK", # Macedonian
|
||||
"ms-MY", # Malay
|
||||
"ml-IN", # Malayalam
|
||||
"mt-MT", # Maltese
|
||||
"zh-CN", # Mandarin Chinese
|
||||
"mi-NZ", # Māori
|
||||
"mr-IN", # Marathi
|
||||
"mn-MN", # Mongolian
|
||||
"ne-NP", # Nepali
|
||||
"nso-ZA", # Northern Sotho
|
||||
"no-NO", # Norwegian
|
||||
"oc-FR", # Occitan
|
||||
"or-IN", # Odia
|
||||
"ps-AF", # Pashto
|
||||
"fa-IR", # Persian
|
||||
"pl-PL", # Polish
|
||||
"pt-PT", # Portuguese
|
||||
"pa-IN", # Punjabi
|
||||
"ro-RO", # Romanian
|
||||
"ru-RU", # Russian
|
||||
"sr-RS", # Serbian
|
||||
"sn-ZW", # Shona
|
||||
"sd-PK", # Sindhi
|
||||
"sk-SK", # Slovak
|
||||
"sl-SI", # Slovenian
|
||||
"so-SO", # Somali
|
||||
"es-ES", # Spanish
|
||||
"sw-KE", # Swahili
|
||||
"sv-SE", # Swedish
|
||||
"ta-IN", # Tamil
|
||||
"tg-TJ", # Tajik
|
||||
"te-IN", # Telugu
|
||||
"th-TH", # Thai
|
||||
"tr-TR", # Turkish
|
||||
"uk-UA", # Ukrainian
|
||||
"umb-AO", # Umbundu
|
||||
"ur-PK", # Urdu
|
||||
"uz-UZ", # Uzbek
|
||||
"vi-VN", # Vietnamese
|
||||
"cy-GB", # Welsh
|
||||
"wo-SN", # Wolof
|
||||
"xh-ZA", # Xhosa
|
||||
"zu-ZA", # Zulu
|
||||
]
|
||||
|
||||
STT_MODELS = {
|
||||
"scribe_v1": "Scribe v1",
|
||||
"scribe_v1_experimental": "Scribe v1 Experimental",
|
||||
}
|
||||
|
@@ -21,11 +21,15 @@
|
||||
"data": {
|
||||
"voice": "Voice",
|
||||
"model": "Model",
|
||||
"stt_model": "Speech-to-Text Model",
|
||||
"stt_auto_language": "Auto-detect language",
|
||||
"configure_voice": "Configure advanced voice settings"
|
||||
},
|
||||
"data_description": {
|
||||
"voice": "Voice to use for the TTS.",
|
||||
"voice": "Voice to use for text-to-speech.",
|
||||
"model": "ElevenLabs model to use. Please note that not all models support all languages equally well.",
|
||||
"stt_model": "Speech-to-Text model to use.",
|
||||
"stt_auto_language": "Automatically detect the spoken language for speech-to-text.",
|
||||
"configure_voice": "Configure advanced voice settings. Find more information in the ElevenLabs documentation."
|
||||
}
|
||||
},
|
||||
@@ -44,5 +48,17 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"tts": {
|
||||
"elevenlabs_tts": {
|
||||
"name": "Text-to-Speech"
|
||||
}
|
||||
},
|
||||
"stt": {
|
||||
"elevenlabs_stt": {
|
||||
"name": "Speech-to-Text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
207
homeassistant/components/elevenlabs/stt.py
Normal file
207
homeassistant/components/elevenlabs/stt.py
Normal file
@@ -0,0 +1,207 @@
|
||||
"""Support for the ElevenLabs speech-to-text service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterable
|
||||
from io import BytesIO
|
||||
import logging
|
||||
|
||||
from elevenlabs import AsyncElevenLabs
|
||||
from elevenlabs.core import ApiError
|
||||
from elevenlabs.types import Model
|
||||
|
||||
from homeassistant.components import stt
|
||||
from homeassistant.components.stt import (
|
||||
AudioBitRates,
|
||||
AudioChannels,
|
||||
AudioCodecs,
|
||||
AudioFormats,
|
||||
AudioSampleRates,
|
||||
SpeechMetadata,
|
||||
SpeechResultState,
|
||||
SpeechToTextEntity,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ElevenLabsConfigEntry
|
||||
from .const import (
|
||||
CONF_STT_AUTO_LANGUAGE,
|
||||
DEFAULT_STT_AUTO_LANGUAGE,
|
||||
DOMAIN,
|
||||
STT_LANGUAGES,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PARALLEL_UPDATES = 10
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ElevenLabsConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up ElevenLabs stt platform via config entry."""
|
||||
client = config_entry.runtime_data.client
|
||||
auto_detect = config_entry.options.get(
|
||||
CONF_STT_AUTO_LANGUAGE, DEFAULT_STT_AUTO_LANGUAGE
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
ElevenLabsSTTEntity(
|
||||
client,
|
||||
config_entry.runtime_data.model,
|
||||
config_entry.runtime_data.stt_model,
|
||||
config_entry.entry_id,
|
||||
auto_detect_language=auto_detect,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class ElevenLabsSTTEntity(SpeechToTextEntity):
    """The ElevenLabs STT API entity."""

    _attr_has_entity_name = True
    _attr_translation_key = "elevenlabs_stt"

    def __init__(
        self,
        client: AsyncElevenLabs,
        model: Model,
        stt_model: str,
        entry_id: str,
        auto_detect_language: bool = False,
    ) -> None:
        """Init ElevenLabs STT service.

        Args:
            client: Authenticated ElevenLabs API client.
            model: Configured ElevenLabs model; only its name is used for
                the device registry entry.
            stt_model: Identifier of the speech-to-text model to transcribe with.
            entry_id: Config entry ID, used as unique ID and device identifier.
            auto_detect_language: If True, let the API detect the spoken
                language instead of passing the metadata language.
        """
        self._client = client
        self._auto_detect_language = auto_detect_language
        self._stt_model = stt_model

        # Entity attributes
        self._attr_unique_id = entry_id
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry_id)},
            manufacturer="ElevenLabs",
            model=model.name,
            name="ElevenLabs",
            entry_type=DeviceEntryType.SERVICE,
        )

    @property
    def supported_languages(self) -> list[str]:
        """Return a list of supported languages."""
        return STT_LANGUAGES

    @property
    def supported_formats(self) -> list[AudioFormats]:
        """Return a list of supported formats."""
        return [AudioFormats.WAV, AudioFormats.OGG]

    @property
    def supported_codecs(self) -> list[AudioCodecs]:
        """Return a list of supported codecs."""
        return [AudioCodecs.PCM, AudioCodecs.OPUS]

    @property
    def supported_bit_rates(self) -> list[AudioBitRates]:
        """Return a list of supported bit rates."""
        return [AudioBitRates.BITRATE_16]

    @property
    def supported_sample_rates(self) -> list[AudioSampleRates]:
        """Return a list of supported sample rates."""
        return [AudioSampleRates.SAMPLERATE_16000]

    @property
    def supported_channels(self) -> list[AudioChannels]:
        """Return a list of supported channels."""
        return [
            AudioChannels.CHANNEL_MONO,
            AudioChannels.CHANNEL_STEREO,
        ]

    async def async_process_audio_stream(
        self, metadata: SpeechMetadata, stream: AsyncIterable[bytes]
    ) -> stt.SpeechResult:
        """Process an audio stream to STT service.

        Buffers the full stream, validates language and audio format, then
        sends one transcription request to the ElevenLabs API. Returns an
        ERROR result on unsupported language/format, empty audio, or API
        failure.
        """
        _LOGGER.debug(
            "Processing audio stream for STT: model=%s, language=%s, format=%s, codec=%s, sample_rate=%s, channels=%s, bit_rate=%s",
            self._stt_model,
            metadata.language,
            metadata.format,
            metadata.codec,
            metadata.sample_rate,
            metadata.channel,
            metadata.bit_rate,
        )

        if self._auto_detect_language:
            # None tells the API to detect the language itself.
            lang_code = None
        else:
            language = metadata.language
            # Case-insensitive membership check against the supported list.
            if language.lower() not in {lang.lower() for lang in STT_LANGUAGES}:
                _LOGGER.warning("Unsupported language: %s", language)
                return stt.SpeechResult(None, SpeechResultState.ERROR)
            # The API expects a bare language code, not a full locale tag.
            lang_code = language.split("-")[0]

        # Raw PCM is only accepted in exactly this shape (16 kHz, mono,
        # 16-bit); anything else must be a container format the API can parse.
        raw_pcm_compatible = (
            metadata.codec == AudioCodecs.PCM
            and metadata.sample_rate == AudioSampleRates.SAMPLERATE_16000
            and metadata.channel == AudioChannels.CHANNEL_MONO
            and metadata.bit_rate == AudioBitRates.BITRATE_16
        )
        if raw_pcm_compatible:
            file_format = "pcm_s16le_16"
        elif metadata.codec == AudioCodecs.PCM:
            _LOGGER.warning("PCM input does not meet expected raw format requirements")
            return stt.SpeechResult(None, SpeechResultState.ERROR)
        else:
            file_format = "other"

        # Collect the whole stream; the convert API takes a complete file.
        audio = b""
        async for chunk in stream:
            audio += chunk

        _LOGGER.debug("Finished reading audio stream, total size: %d bytes", len(audio))
        if not audio:
            _LOGGER.warning("No audio received in stream")
            return stt.SpeechResult(None, SpeechResultState.ERROR)

        lang_display = lang_code if lang_code else "auto-detected"

        _LOGGER.debug(
            "Transcribing audio (%s), format: %s, size: %d bytes",
            lang_display,
            file_format,
            len(audio),
        )

        try:
            response = await self._client.speech_to_text.convert(
                file=BytesIO(audio),
                file_format=file_format,
                model_id=self._stt_model,
                language_code=lang_code,
                tag_audio_events=False,
                num_speakers=1,
                diarize=False,
            )
        except ApiError as exc:
            _LOGGER.error("Error during processing of STT request: %s", exc)
            return stt.SpeechResult(None, SpeechResultState.ERROR)

        text = response.text or ""
        detected_lang_code = response.language_code or "?"
        detected_lang_prob = response.language_probability or "?"

        _LOGGER.debug(
            "Transcribed text is in language %s (probability %s): %s",
            detected_lang_code,
            detected_lang_prob,
            text,
        )

        return stt.SpeechResult(text, SpeechResultState.SUCCESS)
|
@@ -71,7 +71,6 @@ async def async_setup_entry(
|
||||
voices,
|
||||
default_voice_id,
|
||||
config_entry.entry_id,
|
||||
config_entry.title,
|
||||
voice_settings,
|
||||
)
|
||||
]
|
||||
@@ -83,6 +82,8 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
|
||||
|
||||
_attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_has_entity_name = True
|
||||
_attr_translation_key = "elevenlabs_tts"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -91,7 +92,6 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
|
||||
voices: list[ElevenLabsVoice],
|
||||
default_voice_id: str,
|
||||
entry_id: str,
|
||||
title: str,
|
||||
voice_settings: VoiceSettings,
|
||||
) -> None:
|
||||
"""Init ElevenLabs TTS service."""
|
||||
@@ -112,11 +112,11 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
|
||||
|
||||
# Entity attributes
|
||||
self._attr_unique_id = entry_id
|
||||
self._attr_name = title
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, entry_id)},
|
||||
manufacturer="ElevenLabs",
|
||||
model=model.name,
|
||||
name="ElevenLabs",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._attr_supported_languages = [
|
||||
|
@@ -20,6 +20,7 @@ from homeassistant.components.recorder.statistics import (
|
||||
from homeassistant.components.recorder.util import get_instance
|
||||
from homeassistant.const import UnitOfEnergy
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import EnergyConverter
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
@@ -153,6 +154,7 @@ class ElviaImporter:
|
||||
name=f"{self.metering_point_id} Consumption",
|
||||
source=DOMAIN,
|
||||
statistic_id=statistic_id,
|
||||
unit_class=EnergyConverter.UNIT_CLASS,
|
||||
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
statistics=statistics,
|
||||
|
@@ -116,10 +116,6 @@ class WaterSourceType(TypedDict):
|
||||
# an EnergyCostSensor will be automatically created
|
||||
stat_cost: str | None
|
||||
|
||||
# An optional statistic_id identifying a device
|
||||
# that includes this device's consumption in its total
|
||||
included_in_stat: str | None
|
||||
|
||||
# Used to generate costs if stat_cost is set to None
|
||||
entity_energy_price: str | None # entity_id of an entity providing price ($/m³)
|
||||
number_energy_price: float | None # Price for energy ($/m³)
|
||||
|
@@ -47,11 +47,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ECConfigEntry) ->
|
||||
radar_coordinator = ECDataUpdateCoordinator(
|
||||
hass, config_entry, radar_data, "radar", DEFAULT_RADAR_UPDATE_INTERVAL
|
||||
)
|
||||
try:
|
||||
await radar_coordinator.async_config_entry_first_refresh()
|
||||
except ConfigEntryNotReady:
|
||||
errors = errors + 1
|
||||
_LOGGER.warning("Unable to retrieve Environment Canada radar")
|
||||
# Skip initial refresh for radar since the camera entity is disabled by default.
|
||||
# The coordinator will fetch data when the entity is enabled.
|
||||
|
||||
aqhi_data = ECAirQuality(coordinates=(lat, lon))
|
||||
aqhi_coordinator = ECDataUpdateCoordinator(
|
||||
@@ -63,7 +60,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ECConfigEntry) ->
|
||||
errors = errors + 1
|
||||
_LOGGER.warning("Unable to retrieve Environment Canada AQHI")
|
||||
|
||||
if errors == 3:
|
||||
# Require at least one coordinator to succeed (weather or AQHI)
|
||||
# Radar is optional since the camera entity is disabled by default
|
||||
if errors >= 2:
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
config_entry.runtime_data = ECRuntimeData(
|
||||
|
@@ -59,6 +59,14 @@ class ECCameraEntity(CoordinatorEntity[ECDataUpdateCoordinator[ECRadar]], Camera
|
||||
|
||||
self.content_type = "image/gif"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
# Trigger coordinator refresh when entity is enabled
|
||||
# since radar coordinator skips initial refresh during setup
|
||||
if not self.coordinator.last_update_success:
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
def camera_image(
|
||||
self, width: int | None = None, height: int | None = None
|
||||
) -> bytes | None:
|
||||
|
@@ -6,11 +6,18 @@ import xml.etree.ElementTree as ET
|
||||
|
||||
import aiohttp
|
||||
from env_canada import ECWeather, ec_exc
|
||||
from env_canada.ec_weather import get_ec_sites_list
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_STATION, CONF_TITLE, DOMAIN
|
||||
|
||||
@@ -25,14 +32,16 @@ async def validate_input(data):
|
||||
lang = data.get(CONF_LANGUAGE).lower()
|
||||
|
||||
if station:
|
||||
# When station is provided, use it and get the coordinates from ECWeather
|
||||
weather_data = ECWeather(station_id=station, language=lang)
|
||||
else:
|
||||
weather_data = ECWeather(coordinates=(lat, lon), language=lang)
|
||||
await weather_data.update()
|
||||
|
||||
if lat is None or lon is None:
|
||||
# Always use the station's coordinates, not the user-provided ones
|
||||
lat = weather_data.lat
|
||||
lon = weather_data.lon
|
||||
else:
|
||||
# When no station is provided, use coordinates to find nearest station
|
||||
weather_data = ECWeather(coordinates=(lat, lon), language=lang)
|
||||
await weather_data.update()
|
||||
|
||||
return {
|
||||
CONF_TITLE: weather_data.metadata.location,
|
||||
@@ -46,6 +55,13 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Environment Canada weather."""
|
||||
|
||||
VERSION = 1
|
||||
_station_codes: list[dict[str, str]] | None = None
|
||||
|
||||
async def _get_station_codes(self) -> list[dict[str, str]]:
|
||||
"""Get station codes, cached after first call."""
|
||||
if self._station_codes is None:
|
||||
self._station_codes = await get_ec_sites_list()
|
||||
return self._station_codes
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -80,9 +96,21 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=info[CONF_TITLE], data=user_input)
|
||||
|
||||
station_codes = await self._get_station_codes()
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_STATION): str,
|
||||
vol.Optional(CONF_STATION): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(
|
||||
value=station["value"], label=station["label"]
|
||||
)
|
||||
for station in station_codes
|
||||
],
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_LATITUDE, default=self.hass.config.latitude
|
||||
): cv.latitude,
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["env_canada"],
|
||||
"requirements": ["env-canada==0.11.2"]
|
||||
"requirements": ["env-canada==0.12.1"]
|
||||
}
|
||||
|
@@ -3,11 +3,11 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Environment Canada: weather location and language",
|
||||
"description": "Either a station ID or latitude/longitude must be specified. The default latitude/longitude used are the values configured in your Home Assistant installation. The closest weather station to the coordinates will be used if specifying coordinates. If a station code is used it must follow the format: PP/code, where PP is the two-letter province and code is the station ID. The list of station IDs can be found here: https://dd.weather.gc.ca/citypage_weather/docs/site_list_towns_en.csv. Weather information can be retrieved in either English or French.",
|
||||
"description": "Select a weather station from the dropdown, or specify coordinates to use the closest station. The default coordinates are from your Home Assistant installation. Weather information can be retrieved in English or French.",
|
||||
"data": {
|
||||
"latitude": "[%key:common::config_flow::data::latitude%]",
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]",
|
||||
"station": "Weather station ID",
|
||||
"station": "Weather station",
|
||||
"language": "Weather information language"
|
||||
}
|
||||
}
|
||||
@@ -16,7 +16,7 @@
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
},
|
||||
"error": {
|
||||
"bad_station_id": "Station ID is invalid, missing, or not found in the station ID database",
|
||||
"bad_station_id": "Station code is invalid, missing, or not found in the station code database",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"error_response": "Response from Environment Canada in error",
|
||||
"too_many_attempts": "Connections to Environment Canada are rate limited; Try again in 60 seconds",
|
||||
|
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==41.12.0",
|
||||
"aioesphomeapi==41.15.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
@@ -29,7 +29,12 @@ from homeassistant.components.climate import (
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_MODE, PRECISION_TENTHS, UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
ATTR_MODE,
|
||||
ATTR_TEMPERATURE,
|
||||
PRECISION_TENTHS,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
@@ -243,7 +248,7 @@ class EvoZone(EvoChild, EvoClimateEntity):
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set a new target temperature."""
|
||||
|
||||
temperature = kwargs["temperature"]
|
||||
temperature = kwargs[ATTR_TEMPERATURE]
|
||||
|
||||
if (until := kwargs.get("until")) is None:
|
||||
if self._evo_device.mode == EvoZoneMode.TEMPORARY_OVERRIDE:
|
||||
|
@@ -35,9 +35,16 @@ class FoscamDeviceInfo:
|
||||
is_turn_off_volume: bool
|
||||
is_turn_off_light: bool
|
||||
supports_speak_volume_adjustment: bool
|
||||
supports_pet_adjustment: bool
|
||||
supports_car_adjustment: bool
|
||||
supports_wdr_adjustment: bool
|
||||
supports_hdr_adjustment: bool
|
||||
|
||||
is_open_wdr: bool | None = None
|
||||
is_open_hdr: bool | None = None
|
||||
is_pet_detection_on: bool | None = None
|
||||
is_car_detection_on: bool | None = None
|
||||
is_human_detection_on: bool | None = None
|
||||
|
||||
|
||||
class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
@@ -107,14 +114,15 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
|
||||
is_open_wdr = None
|
||||
is_open_hdr = None
|
||||
reserve3 = product_info.get("reserve3")
|
||||
reserve3 = product_info.get("reserve4")
|
||||
reserve3_int = int(reserve3) if reserve3 is not None else 0
|
||||
|
||||
if (reserve3_int & (1 << 8)) != 0:
|
||||
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
|
||||
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
|
||||
if supports_wdr_adjustment_val:
|
||||
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
|
||||
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
|
||||
is_open_wdr = bool(int(mode))
|
||||
else:
|
||||
elif supports_hdr_adjustment_val:
|
||||
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
|
||||
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
|
||||
is_open_hdr = bool(int(mode))
|
||||
@@ -126,6 +134,34 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
pet_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities2")) & 512)
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
car_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities2")) & 256)
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
ret_md, mothion_config_val = self.session.get_motion_detect_config()
|
||||
if pet_adjustment_val:
|
||||
is_pet_detection_on_val = (
|
||||
mothion_config_val["petEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_pet_detection_on_val = False
|
||||
|
||||
if car_adjustment_val:
|
||||
is_car_detection_on_val = (
|
||||
mothion_config_val["carEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_car_detection_on_val = False
|
||||
|
||||
is_human_detection_on_val = (
|
||||
mothion_config_val["humanEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
|
||||
return FoscamDeviceInfo(
|
||||
dev_info=dev_info,
|
||||
@@ -141,8 +177,15 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
is_turn_off_volume=is_turn_off_volume_val,
|
||||
is_turn_off_light=is_turn_off_light_val,
|
||||
supports_speak_volume_adjustment=supports_speak_volume_adjustment_val,
|
||||
supports_pet_adjustment=pet_adjustment_val,
|
||||
supports_car_adjustment=car_adjustment_val,
|
||||
supports_hdr_adjustment=supports_hdr_adjustment_val,
|
||||
supports_wdr_adjustment=supports_wdr_adjustment_val,
|
||||
is_open_wdr=is_open_wdr,
|
||||
is_open_hdr=is_open_hdr,
|
||||
is_pet_detection_on=is_pet_detection_on_val,
|
||||
is_car_detection_on=is_car_detection_on_val,
|
||||
is_human_detection_on=is_human_detection_on_val,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> FoscamDeviceInfo:
|
||||
|
@@ -38,6 +38,15 @@
|
||||
},
|
||||
"wdr_switch": {
|
||||
"default": "mdi:alpha-w-box"
|
||||
},
|
||||
"pet_detection": {
|
||||
"default": "mdi:paw"
|
||||
},
|
||||
"car_detection": {
|
||||
"default": "mdi:car-hatchback"
|
||||
},
|
||||
"human_detection": {
|
||||
"default": "mdi:human"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
|
@@ -22,7 +22,7 @@ class FoscamNumberEntityDescription(NumberEntityDescription):
|
||||
|
||||
native_value_fn: Callable[[FoscamCoordinator], int]
|
||||
set_value_fn: Callable[[FoscamCamera, float], Any]
|
||||
exists_fn: Callable[[FoscamCoordinator], bool]
|
||||
exists_fn: Callable[[FoscamCoordinator], bool] = lambda _: True
|
||||
|
||||
|
||||
NUMBER_DESCRIPTIONS: list[FoscamNumberEntityDescription] = [
|
||||
@@ -34,7 +34,6 @@ NUMBER_DESCRIPTIONS: list[FoscamNumberEntityDescription] = [
|
||||
native_step=1,
|
||||
native_value_fn=lambda coordinator: coordinator.data.device_volume,
|
||||
set_value_fn=lambda session, value: session.setAudioVolume(value),
|
||||
exists_fn=lambda _: True,
|
||||
),
|
||||
FoscamNumberEntityDescription(
|
||||
key="speak_volume",
|
||||
|
@@ -61,6 +61,15 @@
|
||||
},
|
||||
"wdr_switch": {
|
||||
"name": "WDR"
|
||||
},
|
||||
"pet_detection": {
|
||||
"name": "Pet detection"
|
||||
},
|
||||
"car_detection": {
|
||||
"name": "Car detection"
|
||||
},
|
||||
"human_detection": {
|
||||
"name": "Human detection"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
|
@@ -30,6 +30,14 @@ def handle_ir_turn_off(session: FoscamCamera) -> None:
|
||||
session.close_infra_led()
|
||||
|
||||
|
||||
def set_motion_detection(session: FoscamCamera, field: str, enabled: bool) -> None:
|
||||
"""Turns on pet detection."""
|
||||
ret, config = session.get_motion_detect_config()
|
||||
if not ret:
|
||||
config[field] = int(enabled)
|
||||
session.set_motion_detect_config(config)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class FoscamSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""A custom entity description that supports a turn_off function."""
|
||||
@@ -37,6 +45,7 @@ class FoscamSwitchEntityDescription(SwitchEntityDescription):
|
||||
native_value_fn: Callable[..., bool]
|
||||
turn_off_fn: Callable[[FoscamCamera], None]
|
||||
turn_on_fn: Callable[[FoscamCamera], None]
|
||||
exists_fn: Callable[[FoscamCoordinator], bool] = lambda _: True
|
||||
|
||||
|
||||
SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
|
||||
@@ -102,6 +111,7 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
|
||||
native_value_fn=lambda data: data.is_open_hdr,
|
||||
turn_off_fn=lambda session: session.setHdrMode(0),
|
||||
turn_on_fn=lambda session: session.setHdrMode(1),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_hdr_adjustment,
|
||||
),
|
||||
FoscamSwitchEntityDescription(
|
||||
key="is_open_wdr",
|
||||
@@ -109,6 +119,30 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
|
||||
native_value_fn=lambda data: data.is_open_wdr,
|
||||
turn_off_fn=lambda session: session.setWdrMode(0),
|
||||
turn_on_fn=lambda session: session.setWdrMode(1),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_wdr_adjustment,
|
||||
),
|
||||
FoscamSwitchEntityDescription(
|
||||
key="pet_detection",
|
||||
translation_key="pet_detection",
|
||||
native_value_fn=lambda data: data.is_pet_detection_on,
|
||||
turn_off_fn=lambda session: set_motion_detection(session, "petEnable", False),
|
||||
turn_on_fn=lambda session: set_motion_detection(session, "petEnable", True),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_pet_adjustment,
|
||||
),
|
||||
FoscamSwitchEntityDescription(
|
||||
key="car_detection",
|
||||
translation_key="car_detection",
|
||||
native_value_fn=lambda data: data.is_car_detection_on,
|
||||
turn_off_fn=lambda session: set_motion_detection(session, "carEnable", False),
|
||||
turn_on_fn=lambda session: set_motion_detection(session, "carEnable", True),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_car_adjustment,
|
||||
),
|
||||
FoscamSwitchEntityDescription(
|
||||
key="human_detection",
|
||||
translation_key="human_detection",
|
||||
native_value_fn=lambda data: data.is_human_detection_on,
|
||||
turn_off_fn=lambda session: set_motion_detection(session, "humanEnable", False),
|
||||
turn_on_fn=lambda session: set_motion_detection(session, "humanEnable", True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -122,24 +156,11 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
entities = []
|
||||
|
||||
product_info = coordinator.data.product_info
|
||||
reserve3 = product_info.get("reserve3", "0")
|
||||
|
||||
for description in SWITCH_DESCRIPTIONS:
|
||||
if description.key == "is_asleep":
|
||||
if not coordinator.data.is_asleep["supported"]:
|
||||
continue
|
||||
elif description.key == "is_open_hdr":
|
||||
if ((1 << 8) & int(reserve3)) != 0 or ((1 << 7) & int(reserve3)) == 0:
|
||||
continue
|
||||
elif description.key == "is_open_wdr":
|
||||
if ((1 << 8) & int(reserve3)) == 0:
|
||||
continue
|
||||
|
||||
entities.append(FoscamGenericSwitch(coordinator, description))
|
||||
async_add_entities(entities)
|
||||
async_add_entities(
|
||||
FoscamGenericSwitch(coordinator, description)
|
||||
for description in SWITCH_DESCRIPTIONS
|
||||
if description.exists_fn(coordinator)
|
||||
)
|
||||
|
||||
|
||||
class FoscamGenericSwitch(FoscamEntity, SwitchEntity):
|
||||
|
@@ -13,27 +13,14 @@ from homeassistant.components.climate import (
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_BATTERY_LEVEL,
|
||||
ATTR_TEMPERATURE,
|
||||
PRECISION_HALVES,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
ATTR_STATE_BATTERY_LOW,
|
||||
ATTR_STATE_HOLIDAY_MODE,
|
||||
ATTR_STATE_SUMMER_MODE,
|
||||
ATTR_STATE_WINDOW_OPEN,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
|
||||
from .entity import FritzBoxDeviceEntity
|
||||
from .model import ClimateExtraAttributes
|
||||
from .sensor import value_scheduled_preset
|
||||
|
||||
HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF]
|
||||
@@ -202,26 +189,6 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
|
||||
self.check_active_or_lock_mode()
|
||||
await self.async_set_hkr_state(PRESET_API_HKR_STATE_MAPPING[preset_mode])
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> ClimateExtraAttributes:
|
||||
"""Return the device specific state attributes."""
|
||||
# deprecated with #143394, can be removed in 2025.11
|
||||
attrs: ClimateExtraAttributes = {
|
||||
ATTR_STATE_BATTERY_LOW: self.data.battery_low,
|
||||
}
|
||||
|
||||
# the following attributes are available since fritzos 7
|
||||
if self.data.battery_level is not None:
|
||||
attrs[ATTR_BATTERY_LEVEL] = self.data.battery_level
|
||||
if self.data.holiday_active is not None:
|
||||
attrs[ATTR_STATE_HOLIDAY_MODE] = self.data.holiday_active
|
||||
if self.data.summer_active is not None:
|
||||
attrs[ATTR_STATE_SUMMER_MODE] = self.data.summer_active
|
||||
if self.data.window_open is not None:
|
||||
attrs[ATTR_STATE_WINDOW_OPEN] = self.data.window_open
|
||||
|
||||
return attrs
|
||||
|
||||
def check_active_or_lock_mode(self) -> None:
|
||||
"""Check if in summer/vacation mode or lock enabled."""
|
||||
if self.data.holiday_active or self.data.summer_active:
|
||||
|
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251001.0"]
|
||||
"requirements": ["home-assistant-frontend==20251001.4"]
|
||||
}
|
||||
|
@@ -191,7 +191,9 @@ async def async_test_still(
|
||||
try:
|
||||
async_client = get_async_client(hass, verify_ssl=verify_ssl)
|
||||
async with asyncio.timeout(GET_IMAGE_TIMEOUT):
|
||||
response = await async_client.get(url, auth=auth, timeout=GET_IMAGE_TIMEOUT)
|
||||
response = await async_client.get(
|
||||
url, auth=auth, timeout=GET_IMAGE_TIMEOUT, follow_redirects=True
|
||||
)
|
||||
response.raise_for_status()
|
||||
image = response.content
|
||||
except (
|
||||
|
@@ -167,6 +167,6 @@ class GitHubDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
)
|
||||
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.unsubscribe)
|
||||
|
||||
def unsubscribe(self, *args) -> None:
|
||||
def unsubscribe(self, *args: Any) -> None:
|
||||
"""Unsubscribe to repository events."""
|
||||
self._client.repos.events.unsubscribe(subscription_id=self._subscription_id)
|
||||
|
@@ -182,10 +182,10 @@ FAN_SPEED_MAX_SPEED_COUNT = 5
|
||||
|
||||
COVER_VALVE_STATES = {
|
||||
cover.DOMAIN: {
|
||||
"closed": cover.STATE_CLOSED,
|
||||
"closing": cover.STATE_CLOSING,
|
||||
"open": cover.STATE_OPEN,
|
||||
"opening": cover.STATE_OPENING,
|
||||
"closed": cover.CoverState.CLOSED.value,
|
||||
"closing": cover.CoverState.CLOSING.value,
|
||||
"open": cover.CoverState.OPEN.value,
|
||||
"opening": cover.CoverState.OPENING.value,
|
||||
},
|
||||
valve.DOMAIN: {
|
||||
"closed": valve.STATE_CLOSED,
|
||||
|
@@ -8,7 +8,12 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_REAUTH,
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
@@ -40,6 +45,12 @@ class OAuth2FlowHandler(
|
||||
"prompt": "consent",
|
||||
}
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a reconfiguration flow."""
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
@@ -60,6 +71,10 @@ class OAuth2FlowHandler(
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data=data
|
||||
)
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(), data=data
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_NAME,
|
||||
|
@@ -30,7 +30,8 @@
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
|
@@ -36,7 +36,7 @@ DEFAULT_URL = SERVER_URLS[0]
|
||||
|
||||
DOMAIN = "growatt_server"
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
LOGIN_INVALID_AUTH_CODE = "502"
|
||||
|
||||
|
@@ -210,6 +210,15 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_solar_generation_today",
|
||||
translation_key="tlx_solar_generation_today",
|
||||
api_key="epvToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_solar_generation_total",
|
||||
translation_key="tlx_solar_generation_total",
|
||||
@@ -430,4 +439,120 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_pac_to_local_load",
|
||||
translation_key="tlx_pac_to_local_load",
|
||||
api_key="pacToLocalLoad",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_pac_to_user_total",
|
||||
translation_key="tlx_pac_to_user_total",
|
||||
api_key="pacToUserTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_pac_to_grid_total",
|
||||
translation_key="tlx_pac_to_grid_total",
|
||||
api_key="pacToGridTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_system_production_today",
|
||||
translation_key="tlx_system_production_today",
|
||||
api_key="esystemToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_system_production_total",
|
||||
translation_key="tlx_system_production_total",
|
||||
api_key="esystemTotal",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
never_resets=True,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_self_consumption_today",
|
||||
translation_key="tlx_self_consumption_today",
|
||||
api_key="eselfToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_self_consumption_total",
|
||||
translation_key="tlx_self_consumption_total",
|
||||
api_key="eselfTotal",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
never_resets=True,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_import_from_grid_today",
|
||||
translation_key="tlx_import_from_grid_today",
|
||||
api_key="etoUserToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_import_from_grid_total",
|
||||
translation_key="tlx_import_from_grid_total",
|
||||
api_key="etoUserTotal",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
never_resets=True,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_batteries_charged_from_grid_today",
|
||||
translation_key="tlx_batteries_charged_from_grid_today",
|
||||
api_key="eacChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_batteries_charged_from_grid_total",
|
||||
translation_key="tlx_batteries_charged_from_grid_total",
|
||||
api_key="eacChargeTotal",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
never_resets=True,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_p_system",
|
||||
translation_key="tlx_p_system",
|
||||
api_key="psystem",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_p_self",
|
||||
translation_key="tlx_p_self",
|
||||
api_key="pself",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
precision=1,
|
||||
),
|
||||
)
|
||||
|
@@ -362,6 +362,9 @@
|
||||
"tlx_wattage_input_4": {
|
||||
"name": "Input 4 wattage"
|
||||
},
|
||||
"tlx_solar_generation_today": {
|
||||
"name": "Solar energy today"
|
||||
},
|
||||
"tlx_solar_generation_total": {
|
||||
"name": "Lifetime total solar energy"
|
||||
},
|
||||
@@ -443,6 +446,45 @@
|
||||
"tlx_statement_of_charge": {
|
||||
"name": "State of charge (SoC)"
|
||||
},
|
||||
"tlx_pac_to_local_load": {
|
||||
"name": "Local load power"
|
||||
},
|
||||
"tlx_pac_to_user_total": {
|
||||
"name": "Import power"
|
||||
},
|
||||
"tlx_pac_to_grid_total": {
|
||||
"name": "Export power"
|
||||
},
|
||||
"tlx_system_production_today": {
|
||||
"name": "System production today"
|
||||
},
|
||||
"tlx_system_production_total": {
|
||||
"name": "Lifetime system production"
|
||||
},
|
||||
"tlx_self_consumption_today": {
|
||||
"name": "Self consumption today"
|
||||
},
|
||||
"tlx_self_consumption_total": {
|
||||
"name": "Lifetime self consumption"
|
||||
},
|
||||
"tlx_import_from_grid_today": {
|
||||
"name": "Import from grid today"
|
||||
},
|
||||
"tlx_import_from_grid_total": {
|
||||
"name": "Lifetime import from grid"
|
||||
},
|
||||
"tlx_batteries_charged_from_grid_today": {
|
||||
"name": "Batteries charged from grid today"
|
||||
},
|
||||
"tlx_batteries_charged_from_grid_total": {
|
||||
"name": "Lifetime batteries charged from grid"
|
||||
},
|
||||
"tlx_p_system": {
|
||||
"name": "System power"
|
||||
},
|
||||
"tlx_p_self": {
|
||||
"name": "Self power"
|
||||
},
|
||||
"total_money_today": {
|
||||
"name": "Total money today"
|
||||
},
|
||||
@@ -461,6 +503,11 @@
|
||||
"total_maximum_output": {
|
||||
"name": "Maximum power"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"ac_charge": {
|
||||
"name": "Charge from grid"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
138
homeassistant/components/growatt_server/switch.py
Normal file
138
homeassistant/components/growatt_server/switch.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""Switch platform for Growatt."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from growattServer import GrowattV1ApiError
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import GrowattConfigEntry, GrowattCoordinator
|
||||
from .sensor.sensor_entity_description import GrowattRequiredKeysMixin
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = (
|
||||
1 # Serialize updates as inverter does not handle concurrent requests
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class GrowattSwitchEntityDescription(SwitchEntityDescription, GrowattRequiredKeysMixin):
|
||||
"""Describes Growatt switch entity."""
|
||||
|
||||
write_key: str | None = None # Parameter ID for writing (if different from api_key)
|
||||
|
||||
|
||||
# Note that the Growatt V1 API uses different keys for reading and writing parameters.
|
||||
# Reading values returns camelCase keys, while writing requires snake_case keys.
|
||||
|
||||
MIN_SWITCH_TYPES: tuple[GrowattSwitchEntityDescription, ...] = (
|
||||
GrowattSwitchEntityDescription(
|
||||
key="ac_charge",
|
||||
translation_key="ac_charge",
|
||||
api_key="acChargeEnable", # Key returned by V1 API
|
||||
write_key="ac_charge", # Key used to write parameter
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: GrowattConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Growatt switch entities."""
|
||||
runtime_data = entry.runtime_data
|
||||
|
||||
# Add switch entities for each MIN device (only supported with V1 API)
|
||||
async_add_entities(
|
||||
GrowattSwitch(device_coordinator, description)
|
||||
for device_coordinator in runtime_data.devices.values()
|
||||
if (
|
||||
device_coordinator.device_type == "min"
|
||||
and device_coordinator.api_version == "v1"
|
||||
)
|
||||
for description in MIN_SWITCH_TYPES
|
||||
)
|
||||
|
||||
|
||||
class GrowattSwitch(CoordinatorEntity[GrowattCoordinator], SwitchEntity):
|
||||
"""Representation of a Growatt switch."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
entity_description: GrowattSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: GrowattCoordinator,
|
||||
description: GrowattSwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, coordinator.device_id)},
|
||||
manufacturer="Growatt",
|
||||
name=coordinator.device_id,
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the switch is on."""
|
||||
value = self.coordinator.data.get(self.entity_description.api_key)
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# API returns integer 1 for enabled, 0 for disabled
|
||||
return bool(value)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._async_set_state(True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._async_set_state(False)
|
||||
|
||||
async def _async_set_state(self, state: bool) -> None:
|
||||
"""Set the switch state."""
|
||||
# Use write_key if specified, otherwise fall back to api_key
|
||||
parameter_id = (
|
||||
self.entity_description.write_key or self.entity_description.api_key
|
||||
)
|
||||
api_value = int(state)
|
||||
|
||||
try:
|
||||
# Use V1 API to write parameter
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.api.min_write_parameter,
|
||||
self.coordinator.device_id,
|
||||
parameter_id,
|
||||
api_value,
|
||||
)
|
||||
except GrowattV1ApiError as e:
|
||||
raise HomeAssistantError(f"Error while setting switch state: {e}") from e
|
||||
|
||||
# If no exception was raised, the write was successful
|
||||
_LOGGER.debug(
|
||||
"Set switch %s to %s",
|
||||
parameter_id,
|
||||
api_value,
|
||||
)
|
||||
|
||||
# Update the value in coordinator data (keep as integer like API returns)
|
||||
self.coordinator.data[self.entity_description.api_key] = api_value
|
||||
self.async_write_ha_state()
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
import math
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
@@ -281,7 +282,7 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity):
|
||||
return sorted(
|
||||
tasks,
|
||||
key=lambda task: (
|
||||
float("inf")
|
||||
math.inf
|
||||
if (uid := UUID(task.uid))
|
||||
not in (tasks_order := self.coordinator.data.user.tasksOrder.todos)
|
||||
else tasks_order.index(uid)
|
||||
@@ -367,7 +368,7 @@ class HabiticaDailiesListEntity(BaseHabiticaListEntity):
|
||||
return sorted(
|
||||
tasks,
|
||||
key=lambda task: (
|
||||
float("inf")
|
||||
math.inf
|
||||
if (uid := UUID(task.uid))
|
||||
not in (tasks_order := self.coordinator.data.user.tasksOrder.dailys)
|
||||
else tasks_order.index(uid)
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyheos"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyheos==1.0.5"],
|
||||
"requirements": ["pyheos==1.0.6"],
|
||||
"ssdp": [
|
||||
{
|
||||
"st": "urn:schemas-denon-com:device:ACT-Denon:1"
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.81", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.82", "babel==2.15.0"]
|
||||
}
|
||||
|
@@ -99,6 +99,20 @@ CLEANING_MODE_OPTIONS = {
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Silent",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Standard",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningModes.Power",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.IntelligentMode",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.VacuumOnly",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.MopOnly",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.VacuumAndMop",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.CleaningMode.MopAfterVacuum",
|
||||
)
|
||||
}
|
||||
|
||||
SUCTION_POWER_OPTIONS = {
|
||||
bsh_key_to_translation_key(option): option
|
||||
for option in (
|
||||
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Silent",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Standard",
|
||||
"ConsumerProducts.CleaningRobot.EnumType.SuctionPower.Max",
|
||||
)
|
||||
}
|
||||
|
||||
@@ -309,6 +323,10 @@ PROGRAM_ENUM_OPTIONS = {
|
||||
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_CLEANING_MODE,
|
||||
CLEANING_MODE_OPTIONS,
|
||||
),
|
||||
(
|
||||
OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_SUCTION_POWER,
|
||||
SUCTION_POWER_OPTIONS,
|
||||
),
|
||||
(OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT, BEAN_AMOUNT_OPTIONS),
|
||||
(
|
||||
OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_COFFEE_TEMPERATURE,
|
||||
|
@@ -30,6 +30,7 @@ from .const import (
|
||||
INTENSIVE_LEVEL_OPTIONS,
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP,
|
||||
SPIN_SPEED_OPTIONS,
|
||||
SUCTION_POWER_OPTIONS,
|
||||
TEMPERATURE_OPTIONS,
|
||||
TRANSLATION_KEYS_PROGRAMS_MAP,
|
||||
VARIO_PERFECT_OPTIONS,
|
||||
@@ -168,6 +169,16 @@ PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS = (
|
||||
for translation_key, value in CLEANING_MODE_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_SUCTION_POWER,
|
||||
translation_key="suction_power",
|
||||
options=list(SUCTION_POWER_OPTIONS),
|
||||
translation_key_values=SUCTION_POWER_OPTIONS,
|
||||
values_translation_key={
|
||||
value: translation_key
|
||||
for translation_key, value in SUCTION_POWER_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_BEAN_AMOUNT,
|
||||
translation_key="bean_amount",
|
||||
|
@@ -202,6 +202,22 @@ set_program_and_options:
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_modes_silent
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_modes_standard
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_modes_power
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop
|
||||
- consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum
|
||||
consumer_products_cleaning_robot_option_suction_power:
|
||||
example: consumer_products_cleaning_robot_enum_type_suction_power_standard
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
mode: dropdown
|
||||
translation_key: suction_power
|
||||
options:
|
||||
- consumer_products_cleaning_robot_enum_type_suction_power_silent
|
||||
- consumer_products_cleaning_robot_enum_type_suction_power_standard
|
||||
- consumer_products_cleaning_robot_enum_type_suction_power_max
|
||||
coffee_maker_options:
|
||||
collapsed: true
|
||||
fields:
|
||||
|
@@ -324,7 +324,19 @@
|
||||
"options": {
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_silent": "Silent",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_standard": "Standard",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "Power"
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "Power",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode": "Intelligent mode",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only": "Vacuum only",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only": "Mop only",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop": "Vacuum and mop",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum": "Mop after vacuum"
|
||||
}
|
||||
},
|
||||
"suction_power": {
|
||||
"options": {
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_silent": "Silent",
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_standard": "Standard",
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_max": "Max"
|
||||
}
|
||||
},
|
||||
"bean_amount": {
|
||||
@@ -519,6 +531,10 @@
|
||||
"name": "Cleaning mode",
|
||||
"description": "Defines the favored cleaning mode."
|
||||
},
|
||||
"consumer_products_cleaning_robot_option_suction_power": {
|
||||
"name": "Suction power",
|
||||
"description": "Defines the suction power."
|
||||
},
|
||||
"consumer_products_coffee_maker_option_bean_amount": {
|
||||
"name": "Bean amount",
|
||||
"description": "Describes the amount of coffee beans used in a coffee machine program."
|
||||
@@ -1196,7 +1212,20 @@
|
||||
"state": {
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_silent": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_silent%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_standard": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_standard%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_power%]"
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_modes_power": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_modes_power%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_intelligent_mode%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_only%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_only%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_vacuum_and_mop%]",
|
||||
"consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum": "[%key:component::home_connect::selector::cleaning_mode::options::consumer_products_cleaning_robot_enum_type_cleaning_mode_mop_after_vacuum%]"
|
||||
}
|
||||
},
|
||||
"suction_power": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::consumer_products_cleaning_robot_option_suction_power::name%]",
|
||||
"state": {
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_silent": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_silent%]",
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_standard": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_standard%]",
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_max": "[%key:component::home_connect::selector::suction_power::options::consumer_products_cleaning_robot_enum_type_suction_power_max%]"
|
||||
}
|
||||
},
|
||||
"bean_amount": {
|
||||
|
@@ -7,11 +7,18 @@ from typing import TYPE_CHECKING, Any, Protocol
|
||||
|
||||
from homeassistant.components import usb
|
||||
from homeassistant.components.homeassistant_hardware import firmware_config_flow
|
||||
from homeassistant.components.homeassistant_hardware.helpers import (
|
||||
HardwareFirmwareDiscoveryInfo,
|
||||
)
|
||||
from homeassistant.components.homeassistant_hardware.util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
ResetTarget,
|
||||
)
|
||||
from homeassistant.components.usb import (
|
||||
usb_service_info_from_device,
|
||||
usb_unique_id_from_service_info,
|
||||
)
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryBaseFlow,
|
||||
@@ -123,22 +130,16 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
async def async_step_usb(self, discovery_info: UsbServiceInfo) -> ConfigFlowResult:
|
||||
"""Handle usb discovery."""
|
||||
device = discovery_info.device
|
||||
vid = discovery_info.vid
|
||||
pid = discovery_info.pid
|
||||
serial_number = discovery_info.serial_number
|
||||
manufacturer = discovery_info.manufacturer
|
||||
description = discovery_info.description
|
||||
unique_id = f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}"
|
||||
unique_id = usb_unique_id_from_service_info(discovery_info)
|
||||
|
||||
device = discovery_info.device = await self.hass.async_add_executor_job(
|
||||
discovery_info.device = await self.hass.async_add_executor_job(
|
||||
usb.get_serial_by_id, discovery_info.device
|
||||
)
|
||||
|
||||
try:
|
||||
await self.async_set_unique_id(unique_id)
|
||||
finally:
|
||||
self._abort_if_unique_id_configured(updates={DEVICE: device})
|
||||
self._abort_if_unique_id_configured(updates={DEVICE: discovery_info.device})
|
||||
|
||||
self._usb_info = discovery_info
|
||||
|
||||
@@ -148,6 +149,24 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
return await self.async_step_confirm()
|
||||
|
||||
async def async_step_import(
|
||||
self, fw_discovery_info: HardwareFirmwareDiscoveryInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle import from ZHA/OTBR firmware notification."""
|
||||
assert fw_discovery_info["usb_device"] is not None
|
||||
usb_info = usb_service_info_from_device(fw_discovery_info["usb_device"])
|
||||
unique_id = usb_unique_id_from_service_info(usb_info)
|
||||
|
||||
if await self.async_set_unique_id(unique_id, raise_on_progress=False):
|
||||
self._abort_if_unique_id_configured(updates={DEVICE: usb_info.device})
|
||||
|
||||
self._usb_info = usb_info
|
||||
self._device = usb_info.device
|
||||
self._hardware_name = HARDWARE_NAME
|
||||
self._probed_firmware_info = fw_discovery_info["firmware_info"]
|
||||
|
||||
return self._async_flow_finished()
|
||||
|
||||
def _async_flow_finished(self) -> ConfigFlowResult:
|
||||
"""Create the config entry."""
|
||||
assert self._usb_info is not None
|
||||
|
@@ -13,6 +13,12 @@
|
||||
"pid": "4001",
|
||||
"description": "*zbt-2*",
|
||||
"known_devices": ["ZBT-2"]
|
||||
},
|
||||
{
|
||||
"vid": "303A",
|
||||
"pid": "831A",
|
||||
"description": "*zbt-2*",
|
||||
"known_devices": ["ZBT-2"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@@ -19,6 +19,12 @@ DATA_COMPONENT: HassKey[HardwareInfoDispatcher] = HassKey(DOMAIN)
|
||||
ZHA_DOMAIN = "zha"
|
||||
OTBR_DOMAIN = "otbr"
|
||||
|
||||
HARDWARE_INTEGRATION_DOMAINS = {
|
||||
"homeassistant_sky_connect",
|
||||
"homeassistant_connect_zbt2",
|
||||
"homeassistant_yellow",
|
||||
}
|
||||
|
||||
OTBR_ADDON_NAME = "OpenThread Border Router"
|
||||
OTBR_ADDON_MANAGER_DATA = "openthread_border_router"
|
||||
OTBR_ADDON_SLUG = "core_openthread_border_router"
|
||||
|
@@ -1,19 +1,38 @@
|
||||
"""Home Assistant Hardware integration helpers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from collections.abc import AsyncIterator, Awaitable, Callable
|
||||
from contextlib import asynccontextmanager
|
||||
import logging
|
||||
from typing import Protocol
|
||||
from typing import TYPE_CHECKING, Protocol, TypedDict
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.components.usb import (
|
||||
USBDevice,
|
||||
async_get_usb_matchers_for_device,
|
||||
usb_device_from_path,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback
|
||||
|
||||
from . import DATA_COMPONENT
|
||||
from .util import FirmwareInfo
|
||||
from .const import HARDWARE_INTEGRATION_DOMAINS
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .util import FirmwareInfo
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HardwareFirmwareDiscoveryInfo(TypedDict):
|
||||
"""Data for triggering hardware integration discovery via firmware notification."""
|
||||
|
||||
usb_device: USBDevice | None
|
||||
firmware_info: FirmwareInfo
|
||||
|
||||
|
||||
class SyncHardwareFirmwareInfoModule(Protocol):
|
||||
"""Protocol type for Home Assistant Hardware firmware info platform modules."""
|
||||
|
||||
@@ -41,6 +60,23 @@ type HardwareFirmwareInfoModule = (
|
||||
)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_get_hardware_domain_for_usb_device(
|
||||
hass: HomeAssistant, usb_device: USBDevice
|
||||
) -> str | None:
|
||||
"""Identify which hardware domain should handle a USB device."""
|
||||
matched = async_get_usb_matchers_for_device(hass, usb_device)
|
||||
hw_domains = {match["domain"] for match in matched} & HARDWARE_INTEGRATION_DOMAINS
|
||||
|
||||
if not hw_domains:
|
||||
return None
|
||||
|
||||
# We can never have two hardware integrations overlap in discovery
|
||||
assert len(hw_domains) == 1
|
||||
|
||||
return list(hw_domains)[0]
|
||||
|
||||
|
||||
class HardwareInfoDispatcher:
|
||||
"""Central dispatcher for hardware/firmware information."""
|
||||
|
||||
@@ -51,6 +87,7 @@ class HardwareInfoDispatcher:
|
||||
self._notification_callbacks: defaultdict[
|
||||
str, set[Callable[[FirmwareInfo], None]]
|
||||
] = defaultdict(set)
|
||||
self._active_firmware_updates: dict[str, str] = {}
|
||||
|
||||
def register_firmware_info_provider(
|
||||
self, domain: str, platform: HardwareFirmwareInfoModule
|
||||
@@ -88,7 +125,7 @@ class HardwareInfoDispatcher:
|
||||
"Received firmware info notification from %r: %s", domain, firmware_info
|
||||
)
|
||||
|
||||
for callback in self._notification_callbacks.get(firmware_info.device, []):
|
||||
for callback in list(self._notification_callbacks[firmware_info.device]):
|
||||
try:
|
||||
callback(firmware_info)
|
||||
except Exception:
|
||||
@@ -96,6 +133,48 @@ class HardwareInfoDispatcher:
|
||||
"Error while notifying firmware info listener %s", callback
|
||||
)
|
||||
|
||||
await self._async_trigger_hardware_discovery(firmware_info)
|
||||
|
||||
async def _async_trigger_hardware_discovery(
|
||||
self, firmware_info: FirmwareInfo
|
||||
) -> None:
|
||||
"""Trigger hardware integration config flows from firmware info.
|
||||
|
||||
Identifies which hardware integration should handle the device based on
|
||||
USB matchers, then triggers an import flow for only that integration.
|
||||
"""
|
||||
|
||||
usb_device = await self.hass.async_add_executor_job(
|
||||
usb_device_from_path, firmware_info.device
|
||||
)
|
||||
|
||||
if usb_device is None:
|
||||
_LOGGER.debug("Cannot find USB for path %s", firmware_info.device)
|
||||
return
|
||||
|
||||
hardware_domain = async_get_hardware_domain_for_usb_device(
|
||||
self.hass, usb_device
|
||||
)
|
||||
|
||||
if hardware_domain is None:
|
||||
_LOGGER.debug("No hardware integration found for device %s", usb_device)
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"Triggering %s import flow for device %s",
|
||||
hardware_domain,
|
||||
firmware_info.device,
|
||||
)
|
||||
|
||||
await self.hass.config_entries.flow.async_init(
|
||||
hardware_domain,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=HardwareFirmwareDiscoveryInfo(
|
||||
usb_device=usb_device,
|
||||
firmware_info=firmware_info,
|
||||
),
|
||||
)
|
||||
|
||||
async def iter_firmware_info(self) -> AsyncIterator[FirmwareInfo]:
|
||||
"""Iterate over all firmware information for all hardware."""
|
||||
for domain, fw_info_module in self._providers.items():
|
||||
@@ -118,6 +197,36 @@ class HardwareInfoDispatcher:
|
||||
if fw_info is not None:
|
||||
yield fw_info
|
||||
|
||||
def register_firmware_update_in_progress(
|
||||
self, device: str, source_domain: str
|
||||
) -> None:
|
||||
"""Register that a firmware update is in progress for a device."""
|
||||
if device in self._active_firmware_updates:
|
||||
current_domain = self._active_firmware_updates[device]
|
||||
raise ValueError(
|
||||
f"Firmware update already in progress for {device} by {current_domain}"
|
||||
)
|
||||
self._active_firmware_updates[device] = source_domain
|
||||
|
||||
def unregister_firmware_update_in_progress(
|
||||
self, device: str, source_domain: str
|
||||
) -> None:
|
||||
"""Unregister a firmware update for a device."""
|
||||
if device not in self._active_firmware_updates:
|
||||
raise ValueError(f"No firmware update in progress for {device}")
|
||||
|
||||
if self._active_firmware_updates[device] != source_domain:
|
||||
current_domain = self._active_firmware_updates[device]
|
||||
raise ValueError(
|
||||
f"Firmware update for {device} is owned by {current_domain}, not {source_domain}"
|
||||
)
|
||||
|
||||
del self._active_firmware_updates[device]
|
||||
|
||||
def is_firmware_update_in_progress(self, device: str) -> bool:
|
||||
"""Check if a firmware update is in progress for a device."""
|
||||
return device in self._active_firmware_updates
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_register_firmware_info_provider(
|
||||
@@ -141,3 +250,42 @@ def async_notify_firmware_info(
|
||||
) -> Awaitable[None]:
|
||||
"""Notify the dispatcher of new firmware information."""
|
||||
return hass.data[DATA_COMPONENT].notify_firmware_info(domain, firmware_info)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_register_firmware_update_in_progress(
|
||||
hass: HomeAssistant, device: str, source_domain: str
|
||||
) -> None:
|
||||
"""Register that a firmware update is in progress for a device."""
|
||||
return hass.data[DATA_COMPONENT].register_firmware_update_in_progress(
|
||||
device, source_domain
|
||||
)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_unregister_firmware_update_in_progress(
|
||||
hass: HomeAssistant, device: str, source_domain: str
|
||||
) -> None:
|
||||
"""Unregister a firmware update for a device."""
|
||||
return hass.data[DATA_COMPONENT].unregister_firmware_update_in_progress(
|
||||
device, source_domain
|
||||
)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_is_firmware_update_in_progress(hass: HomeAssistant, device: str) -> bool:
|
||||
"""Check if a firmware update is in progress for a device."""
|
||||
return hass.data[DATA_COMPONENT].is_firmware_update_in_progress(device)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def async_firmware_update_context(
|
||||
hass: HomeAssistant, device: str, source_domain: str
|
||||
) -> AsyncIterator[None]:
|
||||
"""Register a device as having its firmware being actively updated."""
|
||||
async_register_firmware_update_in_progress(hass, device, source_domain)
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
async_unregister_firmware_update_in_progress(hass, device, source_domain)
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user