Compare commits

..

4 Commits

Author          SHA1        Message                                               Date
jbouwh          92e8ba4012  Fix rebase issues                                     2025-09-28 14:37:56 +00:00
jbouwh          79bd458c81  Fix Ruff - add absolute humidity                      2025-09-28 13:55:20 +00:00
Jan Bouwhuis    9261c89339  Update homeassistant/components/sensor/strings.json   2025-09-28 13:55:20 +00:00
                            Co-authored-by: Norbert Rittel <norbert@rittel.de>
jbouwh          3deca53a7a  Add selectors for sensor device and state class       2025-09-28 13:55:20 +00:00
1109 changed files with 17249 additions and 54462 deletions

View File

@@ -190,7 +190,7 @@ jobs:
 echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
 - name: Login to GitHub Container Registry
-uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
 fi
 - name: Login to GitHub Container Registry
-uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
 - name: Login to DockerHub
 if: matrix.registry == 'docker.io/homeassistant'
-uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 username: ${{ secrets.DOCKERHUB_USERNAME }}
 password: ${{ secrets.DOCKERHUB_TOKEN }}
 - name: Login to GitHub Container Registry
 if: matrix.registry == 'ghcr.io/home-assistant'
-uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Login to GitHub Container Registry
-uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}

File diff suppressed because it is too large

View File

@@ -24,11 +24,11 @@ jobs:
 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Initialize CodeQL
-uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
+uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
 with:
 languages: python
 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
+uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
 with:
 category: "/language:python"

View File

@@ -17,7 +17,7 @@ jobs:
 # - No PRs marked as no-stale
 # - No issues (-1)
 - name: 60 days stale PRs policy
-uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
 with:
 repo-token: ${{ secrets.GITHUB_TOKEN }}
 days-before-stale: 60
@@ -57,7 +57,7 @@
 # - No issues marked as no-stale or help-wanted
 # - No PRs (-1)
 - name: 90 days stale issues
-uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
 with:
 repo-token: ${{ steps.token.outputs.token }}
 days-before-stale: 90
@@ -87,7 +87,7 @@
 # - No Issues marked as no-stale or help-wanted
 # - No PRs (-1)
 - name: Needs more information stale issues policy
-uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
 with:
 repo-token: ${{ steps.token.outputs.token }}
 only-labels: "needs-more-information"

View File

@@ -203,7 +203,6 @@ homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.filter.*
-homeassistant.components.firefly_iii.*
 homeassistant.components.fitbit.*
 homeassistant.components.flexit_bacnet.*
 homeassistant.components.flux_led.*
@@ -221,7 +220,6 @@ homeassistant.components.generic_thermostat.*
 homeassistant.components.geo_location.*
 homeassistant.components.geocaching.*
 homeassistant.components.gios.*
-homeassistant.components.github.*
 homeassistant.components.glances.*
 homeassistant.components.go2rtc.*
 homeassistant.components.goalzero.*
@@ -327,7 +325,6 @@ homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
 homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
-homeassistant.components.lunatone.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*
@@ -556,7 +553,6 @@ homeassistant.components.vacuum.*
 homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
-homeassistant.components.vivotek.*
 homeassistant.components.vlc_telnet.*
 homeassistant.components.vodafone_station.*
 homeassistant.components.volvo.*

View File

@@ -1 +0,0 @@
-.github/copilot-instructions.md

CODEOWNERS (generated)
View File

@@ -492,8 +492,6 @@ build.json @home-assistant/supervisor
 /tests/components/filesize/ @gjohansson-ST
 /homeassistant/components/filter/ @dgomes
 /tests/components/filter/ @dgomes
-/homeassistant/components/firefly_iii/ @erwindouna
-/tests/components/firefly_iii/ @erwindouna
 /homeassistant/components/fireservicerota/ @cyberjunky
 /tests/components/fireservicerota/ @cyberjunky
 /homeassistant/components/firmata/ @DaAwesomeP
@@ -762,8 +760,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo
-/homeassistant/components/iometer/ @jukrebs
-/tests/components/iometer/ @jukrebs
+/homeassistant/components/iometer/ @MaestroOnICe
+/tests/components/iometer/ @MaestroOnICe
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -910,8 +908,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/luci/ @mzdrale
 /homeassistant/components/luftdaten/ @fabaff @frenck
 /tests/components/luftdaten/ @fabaff @frenck
-/homeassistant/components/lunatone/ @MoonDevLT
-/tests/components/lunatone/ @MoonDevLT
 /homeassistant/components/lupusec/ @majuss @suaveolent
 /tests/components/lupusec/ @majuss @suaveolent
 /homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -957,8 +953,6 @@ build.json @home-assistant/supervisor
 /tests/components/met_eireann/ @DylanGore
 /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
 /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
-/homeassistant/components/meteo_lt/ @xE1H
-/tests/components/meteo_lt/ @xE1H
 /homeassistant/components/meteoalarm/ @rolfberkenbosch
 /homeassistant/components/meteoclimatic/ @adrianmo
 /tests/components/meteoclimatic/ @adrianmo
@@ -1065,8 +1059,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/nilu/ @hfurubotten
 /homeassistant/components/nina/ @DeerMaximum
 /tests/components/nina/ @DeerMaximum
-/homeassistant/components/nintendo_parental_controls/ @pantherale0
-/tests/components/nintendo_parental_controls/ @pantherale0
 /homeassistant/components/nissan_leaf/ @filcole
 /homeassistant/components/noaa_tides/ @jdelaney72
 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1198,6 +1190,8 @@ build.json @home-assistant/supervisor
 /tests/components/plex/ @jjlawren
 /homeassistant/components/plugwise/ @CoMPaTech @bouwew
 /tests/components/plugwise/ @CoMPaTech @bouwew
+/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
+/tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
 /tests/components/point/ @fredrike
 /homeassistant/components/pooldose/ @lmaertin
@@ -1413,8 +1407,8 @@ build.json @home-assistant/supervisor
 /tests/components/sfr_box/ @epenet
 /homeassistant/components/sftp_storage/ @maretodoric
 /tests/components/sftp_storage/ @maretodoric
-/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
-/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
+/homeassistant/components/sharkiq/ @JeffResc @funkybunch
+/tests/components/sharkiq/ @JeffResc @funkybunch
 /homeassistant/components/shell_command/ @home-assistant/core
 /tests/components/shell_command/ @home-assistant/core
 /homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
@@ -1479,8 +1473,8 @@ build.json @home-assistant/supervisor
 /tests/components/snoo/ @Lash-L
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
-/homeassistant/components/solaredge/ @frenck @bdraco @tronikos
-/tests/components/solaredge/ @frenck @bdraco @tronikos
+/homeassistant/components/solaredge/ @frenck @bdraco
+/tests/components/solaredge/ @frenck @bdraco
 /homeassistant/components/solaredge_local/ @drobtravels @scheric
 /homeassistant/components/solarlog/ @Ernst79 @dontinelli
 /tests/components/solarlog/ @Ernst79 @dontinelli

View File

@@ -34,11 +34,9 @@ WORKDIR /usr/src
 COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
+RUN uv python install 3.13.2
 USER vscode
-ENV UV_PYTHON=3.13.2
-RUN uv python install
 ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
 RUN uv venv $VIRTUAL_ENV
 ENV PATH="$VIRTUAL_ENV/bin:$PATH"

View File

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
-armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
-armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
-amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
-i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
+aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
+armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
+armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
+amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
+i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
 codenotary:
 signer: notary@home-assistant.io
 base_image: notary@home-assistant.io

View File

@@ -616,34 +616,34 @@ async def async_enable_logging(
 ),
 )
-logger = logging.getLogger()
-logger.setLevel(logging.INFO if verbose else logging.WARNING)
+# Log errors to a file if we have write access to file or config dir
 if log_file is None:
-default_log_path = hass.config.path(ERROR_LOG_FILENAME)
-if "SUPERVISOR" in os.environ:
-_LOGGER.info("Running in Supervisor, not logging to file")
-# Rename the default log file if it exists, since previous versions created
-# it even on Supervisor
-if os.path.isfile(default_log_path):
-with contextlib.suppress(OSError):
-os.rename(default_log_path, f"{default_log_path}.old")
-err_log_path = None
-else:
-err_log_path = default_log_path
+err_log_path = hass.config.path(ERROR_LOG_FILENAME)
 else:
 err_log_path = os.path.abspath(log_file)
-if err_log_path:
+err_path_exists = os.path.isfile(err_log_path)
+err_dir = os.path.dirname(err_log_path)
+# Check if we can write to the error log if it exists or that
+# we can create files in the containing directory if not.
+if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
+not err_path_exists and os.access(err_dir, os.W_OK)
+):
 err_handler = await hass.async_add_executor_job(
 _create_log_file, err_log_path, log_rotate_days
 )
 err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+logger = logging.getLogger()
 logger.addHandler(err_handler)
+logger.setLevel(logging.INFO if verbose else logging.WARNING)
 # Save the log file location for access by other components.
 hass.data[DATA_LOGGING] = err_log_path
+else:
+_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
 async_activate_log_queue_handler(hass)
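
The "-" side of the hunk above skips file logging entirely when running under the Supervisor (renaming any stale log file), while the "+" side decides by checking filesystem write access before attaching the handler. A minimal standalone sketch of that write-access check, using only the standard library rather than Home Assistant's helpers (function and format names here are illustrative, not from the source):

# Illustrative sketch only: a standalone approximation of the write-access check
# seen on the "+" side of the hunk above. Not the Home Assistant implementation.
import logging
import os


def attach_error_file_handler(logger: logging.Logger, err_log_path: str) -> bool:
    """Attach a file handler only if the log file (or its directory) is writable."""
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path) or "."

    # Writable existing file, or a writable directory where it can be created.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        handler = logging.FileHandler(err_log_path, mode="a")
        handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
        logger.addHandler(handler)
        return True

    logger.error("Unable to set up error log %s (access denied)", err_log_path)
    return False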

View File

@@ -1,5 +0,0 @@
-{
-"domain": "eltako",
-"name": "Eltako",
-"iot_standards": ["matter"]
-}

View File

@@ -0,0 +1,5 @@
+{
+"domain": "ibm",
+"name": "IBM",
+"integrations": ["watson_iot", "watson_tts"]
+}

View File

@@ -1,5 +0,0 @@
-{
-"domain": "konnected",
-"name": "Konnected",
-"integrations": ["konnected", "konnected_esphome"]
-}

View File

@@ -1,5 +0,0 @@
-{
-"domain": "level",
-"name": "Level",
-"iot_standards": ["matter"]
-}

View File

@@ -12,13 +12,11 @@ from homeassistant.components.bluetooth import async_get_scanner
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ADDRESS
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from .const import CONF_IS_NEW_STYLE_SCALE
 SCAN_INTERVAL = timedelta(seconds=15)
-UPDATE_DEBOUNCE_TIME = 0.2
 _LOGGER = logging.getLogger(__name__)
@@ -40,19 +38,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
 config_entry=entry,
 )
-debouncer = Debouncer(
-hass=hass,
-logger=_LOGGER,
-cooldown=UPDATE_DEBOUNCE_TIME,
-immediate=True,
-function=self.async_update_listeners,
-)
 self._scale = AcaiaScale(
 address_or_ble_device=entry.data[CONF_ADDRESS],
 name=entry.title,
 is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
-notify_callback=debouncer.async_schedule_call,
+notify_callback=self.async_update_listeners,
 scanner=async_get_scanner(hass),
 )
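
On the "-" side of the hunk above the scale's notify callback is routed through a Debouncer (0.2 s cooldown, immediate first call) so bursts of BLE notifications collapse into a single listener update; the "+" side calls async_update_listeners directly. A rough plain-asyncio approximation of that coalescing behaviour, offered only to illustrate the idea (this is not Home Assistant's Debouncer helper):

# Illustrative sketch only: immediate first call, then at most one trailing call
# per cooldown window, as a stand-in for the Debouncer usage shown above.
import asyncio
from collections.abc import Callable


class SimpleDebouncer:
    def __init__(self, cooldown: float, function: Callable[[], None]) -> None:
        self._cooldown = cooldown
        self._function = function
        self._pending = False
        self._timer: asyncio.TimerHandle | None = None

    def schedule_call(self) -> None:
        """Run immediately if idle; otherwise coalesce into one trailing call."""
        if self._timer is None:
            self._function()
            self._start_cooldown()
        else:
            self._pending = True

    def _start_cooldown(self) -> None:
        loop = asyncio.get_running_loop()
        self._timer = loop.call_later(self._cooldown, self._cooldown_done)

    def _cooldown_done(self) -> None:
        self._timer = None
        if self._pending:
            self._pending = False
            self._function()
            self._start_cooldown()


async def _demo() -> None:
    calls: list[int] = []
    deb = SimpleDebouncer(0.2, lambda: calls.append(1))
    for _ in range(10):  # burst of notifications
        deb.schedule_call()
    await asyncio.sleep(0.5)
    print(len(calls))  # 2: one immediate call plus one coalesced trailing call


if __name__ == "__main__":
    asyncio.run(_demo())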

View File

@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)

View File

@@ -1,9 +1,6 @@
 {
 "entity": {
 "sensor": {
-"air_quality": {
-"default": "mdi:air-filter"
-},
 "cloud_ceiling": {
 "default": "mdi:weather-fog"
 },
@@ -37,6 +34,9 @@
 "thunderstorm_probability_night": {
 "default": "mdi:weather-lightning"
 },
+"translation_key": {
+"default": "mdi:air-filter"
+},
 "tree_pollen": {
 "default": "mdi:tree-outline"
 },

View File

@@ -71,14 +71,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
 }
 )
-return self.async_show_form(
-step_id="user",
-data_schema=schema,
-errors=errors,
-description_placeholders={
-"api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario"
-},
-)
+return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
 @staticmethod
 @callback

View File

@@ -14,7 +14,7 @@
 "longitude": "[%key:common::config_flow::data::longitude%]",
 "name": "Name of the integration"
 },
-"description": "To generate API key go to {api_key_url}"
+"description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario"
 }
 }
 },

View File

@@ -1,9 +1,7 @@
 """Airgradient Update platform."""
 from datetime import timedelta
-import logging
-from airgradient import AirGradientConnectionError
 from propcache.api import cached_property
 from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -15,7 +13,6 @@ from .entity import AirGradientEntity
 PARALLEL_UPDATES = 1
 SCAN_INTERVAL = timedelta(hours=1)
-_LOGGER = logging.getLogger(__name__)
 async def async_setup_entry(
@@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
 """Representation of Airgradient Update."""
 _attr_device_class = UpdateDeviceClass.FIRMWARE
-_server_unreachable_logged = False
 def __init__(self, coordinator: AirGradientCoordinator) -> None:
 """Initialize the entity."""
@@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
 """Return the installed version of the entity."""
 return self.coordinator.data.measures.firmware_version
-@property
-def available(self) -> bool:
-"""Return if entity is available."""
-return super().available and self._attr_available
 async def async_update(self) -> None:
 """Update the entity."""
-try:
-self._attr_latest_version = (
-await self.coordinator.client.get_latest_firmware_version(
-self.coordinator.serial_number
-)
-)
-except AirGradientConnectionError:
-self._attr_latest_version = None
-self._attr_available = False
-if not self._server_unreachable_logged:
-_LOGGER.error(
-"Unable to connect to AirGradient server to check for updates"
-)
-self._server_unreachable_logged = True
-else:
-self._server_unreachable_logged = False
-self._attr_available = True
+self._attr_latest_version = (
+await self.coordinator.client.get_latest_firmware_version(
+self.coordinator.serial_number
+)
+)

View File

@@ -18,10 +18,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS
-DESCRIPTION_PLACEHOLDERS = {
-"developer_registration_url": "https://developer.airly.eu/register",
-}
 class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
 """Config flow for Airly."""
@@ -89,7 +85,6 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
 }
 ),
 errors=errors,
-description_placeholders=DESCRIPTION_PLACEHOLDERS,
 )

View File

@@ -2,7 +2,7 @@
 "config": {
 "step": {
 "user": {
-"description": "To generate API key go to {developer_registration_url}",
+"description": "To generate API key go to https://developer.airly.eu/register",
 "data": {
 "name": "[%key:common::config_flow::data::name%]",
 "api_key": "[%key:common::config_flow::data::api_key%]",

View File

@@ -6,5 +6,5 @@
 "documentation": "https://www.home-assistant.io/integrations/airos",
 "iot_class": "local_polling",
 "quality_scale": "bronze",
-"requirements": ["airos==0.5.5"]
+"requirements": ["airos==0.5.3"]
 }

View File

@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 }
 )
-URL_API_INTEGRATION = {
-"url": "https://dashboard.airthings.com/integrations/api-integration"
-}
 class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 """Handle a config flow for Airthings."""
@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 return self.async_show_form(
 step_id="user",
 data_schema=STEP_USER_DATA_SCHEMA,
-description_placeholders=URL_API_INTEGRATION,
+description_placeholders={
+"url": (
+"https://dashboard.airthings.com/integrations/api-integration"
+),
+},
 )
 errors = {}
@@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 return self.async_create_entry(title="Airthings", data=user_input)
 return self.async_show_form(
-step_id="user",
-data_schema=STEP_USER_DATA_SCHEMA,
-errors=errors,
-description_placeholders=URL_API_INTEGRATION,
+step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
 )

View File

@@ -4,9 +4,9 @@
 "user": {
 "data": {
 "id": "ID",
-"secret": "Secret"
-},
-"description": "Log in at {url} to find your credentials"
+"secret": "Secret",
+"description": "Login at {url} to find your credentials"
+}
 }
 },
 "error": {

View File

@@ -6,13 +6,8 @@ import dataclasses
 import logging
 from typing import Any
-from airthings_ble import (
-AirthingsBluetoothDeviceData,
-AirthingsDevice,
-UnsupportedDeviceError,
-)
+from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
 from bleak import BleakError
-from habluetooth import BluetoothServiceInfoBleak
 import voluptuous as vol
 from homeassistant.components import bluetooth
@@ -32,7 +27,6 @@ SERVICE_UUIDS = [
 "b42e4a8e-ade7-11e4-89d3-123b93f75cba",
 "b42e1c08-ade7-11e4-89d3-123b93f75cba",
 "b42e3882-ade7-11e4-89d3-123b93f75cba",
-"b42e90a2-ade7-11e4-89d3-123b93f75cba",
 ]
@@ -43,7 +37,6 @@ class Discovery:
 name: str
 discovery_info: BluetoothServiceInfo
 device: AirthingsDevice
-data: AirthingsBluetoothDeviceData
 def get_name(device: AirthingsDevice) -> str:
@@ -51,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str:
 name = device.friendly_name()
 if identifier := device.identifier:
-name += f" ({device.model.value}{identifier})"
+name += f" ({identifier})"
 return name
@@ -69,8 +62,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 self._discovered_device: Discovery | None = None
 self._discovered_devices: dict[str, Discovery] = {}
-async def _get_device(
-self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo
+async def _get_device_data(
+self, discovery_info: BluetoothServiceInfo
 ) -> AirthingsDevice:
 ble_device = bluetooth.async_ble_device_from_address(
 self.hass, discovery_info.address
@@ -79,8 +72,10 @@
 _LOGGER.debug("no ble_device in _get_device_data")
 raise AirthingsDeviceUpdateError("No ble_device")
+airthings = AirthingsBluetoothDeviceData(_LOGGER)
 try:
-device = await data.update_device(ble_device)
+data = await airthings.update_device(ble_device)
 except BleakError as err:
 _LOGGER.error(
 "Error connecting to and getting data from %s: %s",
@@ -88,15 +83,12 @@
 err,
 )
 raise AirthingsDeviceUpdateError("Failed getting device data") from err
-except UnsupportedDeviceError:
-_LOGGER.debug("Skipping unsupported device: %s", discovery_info.name)
-raise
 except Exception as err:
 _LOGGER.error(
 "Unknown error occurred from %s: %s", discovery_info.address, err
 )
 raise
-return device
+return data
 async def async_step_bluetooth(
 self, discovery_info: BluetoothServiceInfo
@@ -106,21 +98,17 @@
 await self.async_set_unique_id(discovery_info.address)
 self._abort_if_unique_id_configured()
-data = AirthingsBluetoothDeviceData(logger=_LOGGER)
 try:
-device = await self._get_device(data=data, discovery_info=discovery_info)
+device = await self._get_device_data(discovery_info)
 except AirthingsDeviceUpdateError:
 return self.async_abort(reason="cannot_connect")
-except UnsupportedDeviceError:
-return self.async_abort(reason="unsupported_device")
 except Exception:
 _LOGGER.exception("Unknown error occurred")
 return self.async_abort(reason="unknown")
 name = get_name(device)
 self.context["title_placeholders"] = {"name": name}
-self._discovered_device = Discovery(name, discovery_info, device, data=data)
+self._discovered_device = Discovery(name, discovery_info, device)
 return await self.async_step_bluetooth_confirm()
@@ -129,12 +117,6 @@
 ) -> ConfigFlowResult:
 """Confirm discovery."""
 if user_input is not None:
-if (
-self._discovered_device is not None
-and self._discovered_device.device.firmware.need_firmware_upgrade
-):
-return self.async_abort(reason="firmware_upgrade_required")
 return self.async_create_entry(
 title=self.context["title_placeholders"]["name"], data={}
 )
@@ -155,9 +137,6 @@
 self._abort_if_unique_id_configured()
 discovery = self._discovered_devices[address]
-if discovery.device.firmware.need_firmware_upgrade:
-return self.async_abort(reason="firmware_upgrade_required")
 self.context["title_placeholders"] = {
 "name": discovery.name,
 }
@@ -167,53 +146,32 @@
 return self.async_create_entry(title=discovery.name, data={})
 current_addresses = self._async_current_ids(include_ignore=False)
-devices: list[BluetoothServiceInfoBleak] = []
 for discovery_info in async_discovered_service_info(self.hass):
 address = discovery_info.address
 if address in current_addresses or address in self._discovered_devices:
 continue
 if MFCT_ID not in discovery_info.manufacturer_data:
 continue
-if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
-_LOGGER.debug(
-"Skipping unsupported device: %s (%s)", discovery_info.name, address
-)
-continue
-devices.append(discovery_info)
-for discovery_info in devices:
-address = discovery_info.address
-data = AirthingsBluetoothDeviceData(logger=_LOGGER)
+if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
+continue
 try:
-device = await self._get_device(data, discovery_info)
+device = await self._get_device_data(discovery_info)
 except AirthingsDeviceUpdateError:
-_LOGGER.error(
-"Error connecting to and getting data from %s (%s)",
-discovery_info.name,
-discovery_info.address,
-)
-continue
-except UnsupportedDeviceError:
-_LOGGER.debug(
-"Skipping unsupported device: %s (%s)",
-discovery_info.name,
-discovery_info.address,
-)
-continue
+return self.async_abort(reason="cannot_connect")
 except Exception:
 _LOGGER.exception("Unknown error occurred")
 return self.async_abort(reason="unknown")
 name = get_name(device)
-_LOGGER.debug("Discovered Airthings device: %s (%s)", name, address)
-self._discovered_devices[address] = Discovery(
-name, discovery_info, device, data
-)
+self._discovered_devices[address] = Discovery(name, discovery_info, device)
 if not self._discovered_devices:
 return self.async_abort(reason="no_devices_found")
 titles = {
-address: get_name(discovery.device)
+address: discovery.device.name
 for (address, discovery) in self._discovered_devices.items()
 }
 return self.async_show_form(

View File

@@ -17,10 +17,6 @@
 {
 "manufacturer_id": 820,
 "service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba"
-},
-{
-"manufacturer_id": 820,
-"service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba"
 }
 ],
 "codeowners": ["@vincegio", "@LaStrada"],
@@ -28,5 +24,5 @@
 "dependencies": ["bluetooth_adapters"],
 "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
 "iot_class": "local_polling",
-"requirements": ["airthings-ble==1.1.1"]
+"requirements": ["airthings-ble==0.9.2"]
 }

View File

@@ -16,12 +16,10 @@ from homeassistant.components.sensor import (
 from homeassistant.const import (
 CONCENTRATION_PARTS_PER_BILLION,
 CONCENTRATION_PARTS_PER_MILLION,
-LIGHT_LUX,
 PERCENTAGE,
 EntityCategory,
 Platform,
 UnitOfPressure,
-UnitOfSoundPressure,
 UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant, callback
@@ -114,25 +112,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
 state_class=SensorStateClass.MEASUREMENT,
 suggested_display_precision=0,
 ),
-"lux": SensorEntityDescription(
-key="lux",
-device_class=SensorDeviceClass.ILLUMINANCE,
-native_unit_of_measurement=LIGHT_LUX,
-state_class=SensorStateClass.MEASUREMENT,
-suggested_display_precision=0,
-),
-"noise": SensorEntityDescription(
-key="noise",
-translation_key="ambient_noise",
-device_class=SensorDeviceClass.SOUND_PRESSURE,
-native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
-state_class=SensorStateClass.MEASUREMENT,
-suggested_display_precision=0,
-),
 }
-PARALLEL_UPDATES = 0
 @callback
 def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:

View File

@@ -6,9 +6,6 @@
 "description": "[%key:component::bluetooth::config::step::user::description%]",
 "data": {
 "address": "[%key:common::config_flow::data::device%]"
-},
-"data_description": {
-"address": "The Airthings devices discovered via Bluetooth."
 }
 },
 "bluetooth_confirm": {
@@ -20,8 +17,6 @@
 "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
 "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
 "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-"firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.",
-"unsupported_device": "Unsupported device",
 "unknown": "[%key:common::config_flow::error::unknown%]"
 }
 },
@@ -41,9 +36,6 @@
 },
 "illuminance": {
 "name": "[%key:component::sensor::entity_component::illuminance::name%]"
-},
-"ambient_noise": {
-"name": "Ambient noise"
 }
 }
 }

View File

@@ -2,14 +2,17 @@
 from airtouch4pyapi import AirTouch
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
-from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
+from .coordinator import AirtouchDataUpdateCoordinator
 PLATFORMS = [Platform.CLIMATE]
+type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
 async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
 """Set up AirTouch4 from a config entry."""
@@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
 info = airtouch.GetAcs()
 if not info:
 raise ConfigEntryNotReady
-coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
+coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
 await coordinator.async_config_entry_first_refresh()
 entry.runtime_data = coordinator

View File

@@ -2,34 +2,26 @@
 import logging
-from airtouch4pyapi import AirTouch
 from airtouch4pyapi.airtouch import AirTouchStatus
 from homeassistant.components.climate import SCAN_INTERVAL
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from .const import DOMAIN
 _LOGGER = logging.getLogger(__name__)
-type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
 class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
 """Class to manage fetching Airtouch data."""
-def __init__(
-self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
-) -> None:
+def __init__(self, hass, airtouch):
 """Initialize global Airtouch data updater."""
 self.airtouch = airtouch
 super().__init__(
 hass,
 _LOGGER,
-config_entry=entry,
 name=DOMAIN,
 update_interval=SCAN_INTERVAL,
 )

View File

@@ -22,17 +22,6 @@ class OAuth2FlowHandler(
 VERSION = CONFIG_FLOW_VERSION
 MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION
-async def async_step_user(
-self, user_input: dict[str, Any] | None = None
-) -> ConfigFlowResult:
-"""Check we have the cloud integration set up."""
-if "cloud" not in self.hass.config.components:
-return self.async_abort(
-reason="cloud_not_enabled",
-description_placeholders={"default_config": "default_config"},
-)
-return await super().async_step_user(user_input)
 async def async_step_reauth(
 self, user_input: Mapping[str, Any]
 ) -> ConfigFlowResult:

View File

@@ -24,8 +24,7 @@
 "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
 "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
 "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
-"cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
+"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
 },
 "create_entry": {
 "default": "[%key:common::config_flow::create_entry::authenticated%]"

View File

@@ -2,9 +2,10 @@
 from __future__ import annotations
+import asyncio
 from datetime import timedelta
 import logging
-from typing import Any, Final, final
+from typing import TYPE_CHECKING, Any, Final, final
 from propcache.api import cached_property
 import voluptuous as vol
@@ -27,6 +28,8 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.config_validation import make_entity_service_schema
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
+from homeassistant.helpers.entity_platform import EntityPlatform
+from homeassistant.helpers.frame import ReportBehavior, report_usage
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey
@@ -146,11 +149,68 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
 )
 _alarm_control_panel_option_default_code: str | None = None
+__alarm_legacy_state: bool = False
+def __init_subclass__(cls, **kwargs: Any) -> None:
+"""Post initialisation processing."""
+super().__init_subclass__(**kwargs)
+if any(method in cls.__dict__ for method in ("_attr_state", "state")):
+# Integrations should use the 'alarm_state' property instead of
+# setting the state directly.
+cls.__alarm_legacy_state = True
+def __setattr__(self, name: str, value: Any, /) -> None:
+"""Set attribute.
+Deprecation warning if setting '_attr_state' directly
+unless already reported.
+"""
+if name == "_attr_state":
+self._report_deprecated_alarm_state_handling()
+return super().__setattr__(name, value)
+@callback
+def add_to_platform_start(
+self,
+hass: HomeAssistant,
+platform: EntityPlatform,
+parallel_updates: asyncio.Semaphore | None,
+) -> None:
+"""Start adding an entity to a platform."""
+super().add_to_platform_start(hass, platform, parallel_updates)
+if self.__alarm_legacy_state:
+self._report_deprecated_alarm_state_handling()
+@callback
+def _report_deprecated_alarm_state_handling(self) -> None:
+"""Report on deprecated handling of alarm state.
+Integrations should implement alarm_state instead of using state directly.
+"""
+report_usage(
+"is setting state directly."
+f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
+" property and return its state using the AlarmControlPanelState enum",
+core_integration_behavior=ReportBehavior.ERROR,
+custom_integration_behavior=ReportBehavior.LOG,
+breaks_in_ha_version="2025.11",
+integration_domain=self.platform.platform_name if self.platform else None,
+exclude_integrations={DOMAIN},
+)
 @final
 @property
 def state(self) -> str | None:
 """Return the current state."""
-return self.alarm_state
+if (alarm_state := self.alarm_state) is not None:
+return alarm_state
+if self._attr_state is not None:
+# Backwards compatibility for integrations that set state directly
+# Should be removed in 2025.11
+if TYPE_CHECKING:
+assert isinstance(self._attr_state, str)
+return self._attr_state
+return None
 @cached_property
 def alarm_state(self) -> AlarmControlPanelState | None:
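
The "+" side of the hunk above detects integrations that still set the alarm state directly: __init_subclass__ flags any subclass that defines _attr_state or overrides state, and __setattr__ intercepts writes to _attr_state so a deprecation report can be emitted. A minimal standalone sketch of that detection pattern, with warnings.warn standing in for Home Assistant's report_usage helper (class and attribute names here are simplified):

# Illustrative sketch only: the subclass/attribute detection pattern from the "+"
# side above, reduced to plain Python.
import warnings
from typing import Any


class BaseEntity:
    _legacy_state: bool = False

    def __init_subclass__(cls, **kwargs: Any) -> None:
        super().__init_subclass__(**kwargs)
        # Remember whether the subclass still defines the deprecated hooks.
        if any(name in cls.__dict__ for name in ("_attr_state", "state")):
            cls._legacy_state = True

    def __setattr__(self, name: str, value: Any, /) -> None:
        # Intercept direct writes to the deprecated attribute.
        if name == "_attr_state":
            self._report_deprecated_state_handling()
        return super().__setattr__(name, value)

    def _report_deprecated_state_handling(self) -> None:
        warnings.warn(
            f"{type(self).__name__} sets state directly; implement the new "
            "state property instead",
            DeprecationWarning,
            stacklevel=3,
        )


class LegacyPanel(BaseEntity):
    _attr_state = None  # deprecated class attribute, flagged at class creation

    def update(self) -> None:
        self._attr_state = "armed_home"  # triggers the deprecation report


LegacyPanel().update()            # emits a DeprecationWarning
print(LegacyPanel._legacy_state)  # True: the subclass was flagged when defined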

View File

@@ -1472,10 +1472,10 @@ class AlexaModeController(AlexaCapability):
 # Return state instead of position when using ModeController.
 mode = self.entity.state
 if mode in (
-cover.CoverState.OPEN,
-cover.CoverState.OPENING,
-cover.CoverState.CLOSED,
-cover.CoverState.CLOSING,
+cover.STATE_OPEN,
+cover.STATE_OPENING,
+cover.STATE_CLOSED,
+cover.STATE_CLOSING,
 STATE_UNKNOWN,
 ):
 return f"{cover.ATTR_POSITION}.{mode}"
@@ -1594,11 +1594,11 @@
 ["Position", AlexaGlobalCatalog.SETTING_OPENING], False
 )
 self._resource.add_mode(
-f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
+f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
 [AlexaGlobalCatalog.VALUE_OPEN],
 )
 self._resource.add_mode(
-f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
+f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
 [AlexaGlobalCatalog.VALUE_CLOSE],
 )
 self._resource.add_mode(
@@ -1651,22 +1651,22 @@
 raise_labels.append(AlexaSemantics.ACTION_OPEN)
 self._semantics.add_states_to_value(
 [AlexaSemantics.STATES_CLOSED],
-f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
+f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
 )
 self._semantics.add_states_to_value(
 [AlexaSemantics.STATES_OPEN],
-f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
+f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
 )
 self._semantics.add_action_to_directive(
 lower_labels,
 "SetMode",
-{"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}"},
+{"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"},
 )
 self._semantics.add_action_to_directive(
 raise_labels,
 "SetMode",
-{"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}"},
+{"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"},
 )
 return self._semantics.serialize_semantics()
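
One side of this hunk uses the cover.CoverState enum where the other uses the legacy cover.STATE_* string constants. Because CoverState is a string-valued enum, its members compare equal to, and format the same as, the old constants, so the generated Alexa mode identifiers are unchanged. A small standalone illustration; the CoverState class below is a stand-in for this example, not the Home Assistant definition:

# Illustrative sketch only: why the two spellings produce identical identifiers.
from enum import StrEnum

STATE_OPEN = "open"  # legacy-style string constant


class CoverState(StrEnum):
    OPEN = "open"
    OPENING = "opening"
    CLOSED = "closed"
    CLOSING = "closing"


assert CoverState.OPEN == STATE_OPEN
assert f"position.{CoverState.OPEN}" == "position.open"
print(f"position.{CoverState.OPEN}")  # position.open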

View File

@@ -1261,9 +1261,9 @@ async def async_api_set_mode(
 elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
 position = mode.split(".")[1]
-if position == cover.CoverState.CLOSED:
+if position == cover.STATE_CLOSED:
 service = cover.SERVICE_CLOSE_COVER
-elif position == cover.CoverState.OPEN:
+elif position == cover.STATE_OPEN:
 service = cover.SERVICE_OPEN_COVER
 elif position == "custom":
 service = cover.SERVICE_STOP_COVER

View File

@@ -18,9 +18,7 @@ from homeassistant.components.binary_sensor import (
 from homeassistant.const import EntityCategory
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-import homeassistant.helpers.entity_registry as er
-from .const import _LOGGER, DOMAIN
 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
 from .utils import async_update_unique_id
@@ -53,47 +51,11 @@ BINARY_SENSORS: Final = (
 ),
 is_supported=lambda device, key: device.sensors.get(key) is not None,
 is_available_fn=lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 ),
 ),
 )
-DEPRECATED_BINARY_SENSORS: Final = (
-AmazonBinarySensorEntityDescription(
-key="bluetooth",
-entity_category=EntityCategory.DIAGNOSTIC,
-translation_key="bluetooth",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="babyCryDetectionState",
-translation_key="baby_cry_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="beepingApplianceDetectionState",
-translation_key="beeping_appliance_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="coughDetectionState",
-translation_key="cough_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="dogBarkDetectionState",
-translation_key="dog_bark_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="waterSoundsDetectionState",
-translation_key="water_sounds_detection",
-is_on_fn=lambda device, key: False,
-),
-)
 async def async_setup_entry(
 hass: HomeAssistant,
@@ -104,8 +66,6 @@ async def async_setup_entry(
 coordinator = entry.runtime_data
-entity_registry = er.async_get(hass)
 # Replace unique id for "detectionState" binary sensor
 await async_update_unique_id(
 hass,
@@ -115,16 +75,6 @@ async def async_setup_entry(
 "detectionState",
 )
-# Clean up deprecated sensors
-for sensor_desc in DEPRECATED_BINARY_SENSORS:
-for serial_num in coordinator.data:
-unique_id = f"{serial_num}-{sensor_desc.key}"
-if entity_id := entity_registry.async_get_entity_id(
-BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
-):
-_LOGGER.debug("Removing deprecated entity %s", entity_id)
-entity_registry.async_remove(entity_id)
 known_devices: set[str] = set()
 def _check_device() -> None:

View File

@@ -8,5 +8,5 @@
 "iot_class": "cloud_polling",
 "loggers": ["aioamazondevices"],
 "quality_scale": "platinum",
-"requirements": ["aioamazondevices==6.4.1"]
+"requirements": ["aioamazondevices==6.2.6"]
 }

View File

@@ -32,9 +32,7 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
 native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
 is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 )
@@ -42,9 +40,9 @@ SENSORS: Final = (
 AmazonSensorEntityDescription(
 key="temperature",
 device_class=SensorDeviceClass.TEMPERATURE,
-native_unit_of_measurement_fn=lambda device, key: (
+native_unit_of_measurement_fn=lambda device, _key: (
 UnitOfTemperature.CELSIUS
-if key in device.sensors and device.sensors[key].scale == "CELSIUS"
+if device.sensors[_key].scale == "CELSIUS"
 else UnitOfTemperature.FAHRENHEIT
 ),
 state_class=SensorStateClass.MEASUREMENT,

View File

@@ -18,11 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import (
-alexa_api_call,
-async_remove_dnd_from_virtual_group,
-async_update_unique_id,
-)
+from .utils import alexa_api_call, async_update_unique_id
 PARALLEL_UPDATES = 1
@@ -33,9 +29,7 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
 is_on_fn: Callable[[AmazonDevice], bool]
 is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 )
 method: str
@@ -64,9 +58,6 @@ async def async_setup_entry(
 hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
 )
-# Remove DND switch from virtual groups
-await async_remove_dnd_from_virtual_group(hass, coordinator)
 known_devices: set[str] = set()
 def _check_device() -> None:

View File

@@ -4,10 +4,8 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate
-from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
-from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.entity_registry as er
@@ -63,21 +61,3 @@ async def async_update_unique_id(
 # Update the registry with the new unique_id
 entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
-async def async_remove_dnd_from_virtual_group(
-hass: HomeAssistant,
-coordinator: AmazonDevicesCoordinator,
-) -> None:
-"""Remove entity DND from virtual group."""
-entity_registry = er.async_get(hass)
-for serial_num in coordinator.data:
-unique_id = f"{serial_num}-do_not_disturb"
-entity_id = entity_registry.async_get_entity_id(
-DOMAIN, SWITCH_DOMAIN, unique_id
-)
-is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
-if entity_id and is_group:
-entity_registry.async_remove(entity_id)
-_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)

View File

@@ -65,31 +65,6 @@ SENSOR_DESCRIPTIONS = [
suggested_display_precision=2, suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME280"}, translation_placeholders={"sensor_name": "BME280"},
), ),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="BME680_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BME680_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BME680_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription( AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE, device_class=SensorDeviceClass.PRESSURE,
key="BMP_pressure", key="BMP_pressure",

View File

@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications() DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901 async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices.""" """Return detailed information about entities and devices."""
dev_reg = dr.async_get(hass) dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass) ent_reg = er.async_get(hass)
@@ -513,8 +513,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
integration_inputs: dict[str, tuple[list[str], list[str]]] = {} integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
integration_configs: dict[str, AnalyticsModifications] = {} integration_configs: dict[str, AnalyticsModifications] = {}
removed_devices: set[str] = set()
# Get device list # Get device list
for device_entry in dev_reg.devices.values(): for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry: if not device_entry.primary_config_entry:
@@ -527,10 +525,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
if config_entry is None: if config_entry is None:
continue continue
if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
removed_devices.add(device_entry.id)
continue
integration_domain = config_entry.domain integration_domain = config_entry.domain
integration_input = integration_inputs.setdefault(integration_domain, ([], [])) integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -557,7 +551,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
for domain, integration_info in integration_inputs.items() for domain, integration_info in integration_inputs.items()
if (integration := integrations.get(domain)) is not None if (integration := integrations.get(domain)) is not None
and integration.is_built_in and integration.is_built_in
and integration.manifest.get("integration_type") in ("device", "hub") and integration.integration_type in ("device", "hub")
} }
# Call integrations that implement the analytics platform # Call integrations that implement the analytics platform
@@ -620,15 +614,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
device_config = integration_config.devices.get(device_id, device_config) device_config = integration_config.devices.get(device_id, device_config)
if device_config.remove: if device_config.remove:
removed_devices.add(device_id)
continue continue
device_entry = dev_reg.devices[device_id] device_entry = dev_reg.devices[device_id]
device_id_mapping[device_id] = (integration_domain, len(devices_info)) device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
devices_info.append( devices_info.append(
{ {
"entities": [],
"entry_type": device_entry.entry_type, "entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None, "has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version, "hw_version": device_entry.hw_version,
@@ -637,7 +631,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"model_id": device_entry.model_id, "model_id": device_entry.model_id,
"sw_version": device_entry.sw_version, "sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id, "via_device": device_entry.via_device_id,
"entities": [],
} }
) )
@@ -676,7 +669,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
entity_entry = ent_reg.entities[entity_id] entity_entry = ent_reg.entities[entity_id]
entity_state = hass.states.get(entity_id) entity_state = hass.states.get(entity_entry.entity_id)
entity_info = { entity_info = {
# LIMITATION: `assumed_state` can be overridden by users; # LIMITATION: `assumed_state` can be overridden by users;
@@ -697,19 +690,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"unit_of_measurement": entity_entry.unit_of_measurement, "unit_of_measurement": entity_entry.unit_of_measurement,
} }
if (device_id_ := entity_entry.device_id) is not None: if (
if device_id_ in removed_devices: ((device_id_ := entity_entry.device_id) is not None)
# The device was removed, so we remove the entity too and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
continue and (new_device_id[0] == integration_domain)
):
if ( device_info = devices_info[new_device_id[1]]
new_device_id := device_id_mapping.get(device_id_) device_info["entities"].append(entity_info)
) is not None and (new_device_id[0] == integration_domain): else:
device_info = devices_info[new_device_id[1]] entities_info.append(entity_info)
device_info["entities"].append(entity_info)
continue
entities_info.append(entity_info)
return { return {
"version": "home-assistant:1", "version": "home-assistant:1",

View File

@@ -4,15 +4,12 @@ from __future__ import annotations
from collections.abc import Mapping from collections.abc import Mapping
from functools import partial from functools import partial
import json
import logging import logging
from typing import Any, cast from typing import Any, cast
import anthropic import anthropic
import voluptuous as vol import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components.zone import ENTITY_ID_HOME
from homeassistant.config_entries import ( from homeassistant.config_entries import (
ConfigEntry, ConfigEntry,
ConfigEntryState, ConfigEntryState,
@@ -21,13 +18,7 @@ from homeassistant.config_entries import (
ConfigSubentryFlow, ConfigSubentryFlow,
SubentryFlowResult, SubentryFlowResult,
) )
from homeassistant.const import ( from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_API_KEY,
CONF_LLM_HASS_API,
CONF_NAME,
)
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import llm from homeassistant.helpers import llm
from homeassistant.helpers.selector import ( from homeassistant.helpers.selector import (
@@ -46,23 +37,12 @@ from .const import (
CONF_RECOMMENDED, CONF_RECOMMENDED,
CONF_TEMPERATURE, CONF_TEMPERATURE,
CONF_THINKING_BUDGET, CONF_THINKING_BUDGET,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
CONF_WEB_SEARCH_MAX_USES,
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_CONVERSATION_NAME, DEFAULT_CONVERSATION_NAME,
DOMAIN, DOMAIN,
RECOMMENDED_CHAT_MODEL, RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS, RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE, RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET,
RECOMMENDED_WEB_SEARCH,
RECOMMENDED_WEB_SEARCH_MAX_USES,
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
WEB_SEARCH_UNSUPPORTED_MODELS,
) )
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -188,14 +168,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
elif user_input.get(
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
):
user_input.update(await self._get_location_data())
if not errors: if not errors:
if self._is_new: if self._is_new:
@@ -243,68 +215,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
errors=errors or None, errors=errors or None,
) )
async def _get_location_data(self) -> dict[str, str]:
"""Get approximate location data of the user."""
location_data: dict[str, str] = {}
zone_home = self.hass.states.get(ENTITY_ID_HOME)
if zone_home is not None:
client = await self.hass.async_add_executor_job(
partial(
anthropic.AsyncAnthropic,
api_key=self._get_entry().data[CONF_API_KEY],
)
)
location_schema = vol.Schema(
{
vol.Optional(
CONF_WEB_SEARCH_CITY,
description="Free text input for the city, e.g. `San Francisco`",
): str,
vol.Optional(
CONF_WEB_SEARCH_REGION,
description="Free text input for the region, e.g. `California`",
): str,
}
)
response = await client.messages.create(
model=RECOMMENDED_CHAT_MODEL,
messages=[
{
"role": "user",
"content": "Where are the following coordinates located: "
f"({zone_home.attributes[ATTR_LATITUDE]},"
f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
"only with a JSON object using the following schema:\n"
f"{convert(location_schema)}",
},
{
"role": "assistant",
"content": "{", # hints the model to skip any preamble
},
],
max_tokens=RECOMMENDED_MAX_TOKENS,
)
_LOGGER.debug("Model response: %s", response.content)
location_data = location_schema(
json.loads(
"{"
+ "".join(
block.text
for block in response.content
if isinstance(block, anthropic.types.TextBlock)
)
)
or {}
)
if self.hass.config.country:
location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country
location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone
_LOGGER.debug("Location data: %s", location_data)
return location_data
async_step_user = async_step_set_options async_step_user = async_step_set_options
async_step_reconfigure = async_step_set_options async_step_reconfigure = async_step_set_options
@@ -363,18 +273,6 @@ def anthropic_config_option_schema(
CONF_THINKING_BUDGET, CONF_THINKING_BUDGET,
default=RECOMMENDED_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET,
): int, ): int,
vol.Optional(
CONF_WEB_SEARCH,
default=RECOMMENDED_WEB_SEARCH,
): bool,
vol.Optional(
CONF_WEB_SEARCH_MAX_USES,
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
): int,
vol.Optional(
CONF_WEB_SEARCH_USER_LOCATION,
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
): bool,
} }
) )
return schema return schema

View File

@@ -18,26 +18,10 @@ RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget" CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0 RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024 MIN_THINKING_BUDGET = 1024
CONF_WEB_SEARCH = "web_search"
RECOMMENDED_WEB_SEARCH = False
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
CONF_WEB_SEARCH_CITY = "city"
CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"
NON_THINKING_MODELS = [ THINKING_MODELS = [
"claude-3-5", # Both sonnet and haiku "claude-3-7-sonnet",
"claude-3-opus", "claude-sonnet-4-0",
"claude-3-haiku", "claude-opus-4-0",
] "claude-opus-4-1",
WEB_SEARCH_UNSUPPORTED_MODELS = [
"claude-3-haiku",
"claude-3-opus",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
] ]

View File

@@ -1,17 +1,12 @@
"""Base entity for Anthropic.""" """Base entity for Anthropic."""
from collections.abc import AsyncGenerator, Callable, Iterable from collections.abc import AsyncGenerator, Callable, Iterable
from dataclasses import dataclass, field
import json import json
from typing import Any from typing import Any
import anthropic import anthropic
from anthropic import AsyncStream from anthropic import AsyncStream
from anthropic.types import ( from anthropic.types import (
CitationsDelta,
CitationsWebSearchResultLocation,
CitationWebSearchResultLocationParam,
ContentBlockParam,
InputJSONDelta, InputJSONDelta,
MessageDeltaUsage, MessageDeltaUsage,
MessageParam, MessageParam,
@@ -21,16 +16,11 @@ from anthropic.types import (
RawContentBlockStopEvent, RawContentBlockStopEvent,
RawMessageDeltaEvent, RawMessageDeltaEvent,
RawMessageStartEvent, RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock, RedactedThinkingBlock,
RedactedThinkingBlockParam, RedactedThinkingBlockParam,
ServerToolUseBlock,
ServerToolUseBlockParam,
SignatureDelta, SignatureDelta,
TextBlock, TextBlock,
TextBlockParam, TextBlockParam,
TextCitation,
TextCitationParam,
TextDelta, TextDelta,
ThinkingBlock, ThinkingBlock,
ThinkingBlockParam, ThinkingBlockParam,
@@ -39,15 +29,9 @@ from anthropic.types import (
ThinkingDelta, ThinkingDelta,
ToolParam, ToolParam,
ToolResultBlockParam, ToolResultBlockParam,
ToolUnionParam,
ToolUseBlock, ToolUseBlock,
ToolUseBlockParam, ToolUseBlockParam,
Usage, Usage,
WebSearchTool20250305Param,
WebSearchToolRequestErrorParam,
WebSearchToolResultBlock,
WebSearchToolResultBlockParam,
WebSearchToolResultError,
) )
from anthropic.types.message_create_params import MessageCreateParamsStreaming from anthropic.types.message_create_params import MessageCreateParamsStreaming
from voluptuous_openapi import convert from voluptuous_openapi import convert
@@ -64,21 +48,14 @@ from .const import (
CONF_MAX_TOKENS, CONF_MAX_TOKENS,
CONF_TEMPERATURE, CONF_TEMPERATURE,
CONF_THINKING_BUDGET, CONF_THINKING_BUDGET,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
CONF_WEB_SEARCH_MAX_USES,
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DOMAIN, DOMAIN,
LOGGER, LOGGER,
MIN_THINKING_BUDGET, MIN_THINKING_BUDGET,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL, RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS, RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE, RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
) )
# Max number of back and forth with the LLM to generate a response # Max number of back and forth with the LLM to generate a response
@@ -96,69 +73,6 @@ def _format_tool(
) )
@dataclass(slots=True)
class CitationDetails:
"""Citation details for a content part."""
index: int = 0
"""Start position of the text."""
length: int = 0
"""Length of the relevant data."""
citations: list[TextCitationParam] = field(default_factory=list)
"""Citations for the content part."""
@dataclass(slots=True)
class ContentDetails:
"""Native data for AssistantContent."""
citation_details: list[CitationDetails] = field(default_factory=list)
def has_content(self) -> bool:
"""Check if there is any content."""
return any(detail.length > 0 for detail in self.citation_details)
def has_citations(self) -> bool:
"""Check if there are any citations."""
return any(detail.citations for detail in self.citation_details)
def add_citation_detail(self) -> None:
"""Add a new citation detail."""
if not self.citation_details or self.citation_details[-1].length > 0:
self.citation_details.append(
CitationDetails(
index=self.citation_details[-1].index
+ self.citation_details[-1].length
if self.citation_details
else 0
)
)
def add_citation(self, citation: TextCitation) -> None:
"""Add a citation to the current detail."""
if not self.citation_details:
self.citation_details.append(CitationDetails())
citation_param: TextCitationParam | None = None
if isinstance(citation, CitationsWebSearchResultLocation):
citation_param = CitationWebSearchResultLocationParam(
type="web_search_result_location",
title=citation.title,
url=citation.url,
cited_text=citation.cited_text,
encrypted_index=citation.encrypted_index,
)
if citation_param:
self.citation_details[-1].citations.append(citation_param)
def delete_empty(self) -> None:
"""Delete empty citation details."""
self.citation_details = [
detail for detail in self.citation_details if detail.citations
]
def _convert_content( def _convert_content(
chat_content: Iterable[conversation.Content], chat_content: Iterable[conversation.Content],
) -> list[MessageParam]: ) -> list[MessageParam]:
@@ -167,31 +81,15 @@ def _convert_content(
for content in chat_content: for content in chat_content:
if isinstance(content, conversation.ToolResultContent): if isinstance(content, conversation.ToolResultContent):
if content.tool_name == "web_search": tool_result_block = ToolResultBlockParam(
tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam( type="tool_result",
type="web_search_tool_result", tool_use_id=content.tool_call_id,
tool_use_id=content.tool_call_id, content=json.dumps(content.tool_result),
content=content.tool_result["content"] )
if "content" in content.tool_result if not messages or messages[-1]["role"] != "user":
else WebSearchToolRequestErrorParam(
type="web_search_tool_result_error",
error_code=content.tool_result.get("error_code", "unavailable"), # type: ignore[typeddict-item]
),
)
external_tool = True
else:
tool_result_block = ToolResultBlockParam(
type="tool_result",
tool_use_id=content.tool_call_id,
content=json.dumps(content.tool_result),
)
external_tool = False
if not messages or messages[-1]["role"] != (
"assistant" if external_tool else "user"
):
messages.append( messages.append(
MessageParam( MessageParam(
role="assistant" if external_tool else "user", role="user",
content=[tool_result_block], content=[tool_result_block],
) )
) )
@@ -253,56 +151,13 @@ def _convert_content(
redacted_thinking_block redacted_thinking_block
) )
if content.content: if content.content:
current_index = 0 messages[-1]["content"].append( # type: ignore[union-attr]
for detail in ( TextBlockParam(type="text", text=content.content)
content.native.citation_details )
if isinstance(content.native, ContentDetails)
else [CitationDetails(length=len(content.content))]
):
if detail.index > current_index:
# Add text block for any text without citations
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=content.content[current_index : detail.index],
)
)
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=content.content[
detail.index : detail.index + detail.length
],
citations=detail.citations,
)
if detail.citations
else TextBlockParam(
type="text",
text=content.content[
detail.index : detail.index + detail.length
],
)
)
current_index = detail.index + detail.length
if current_index < len(content.content):
# Add text block for any remaining text without citations
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=content.content[current_index:],
)
)
if content.tool_calls: if content.tool_calls:
messages[-1]["content"].extend( # type: ignore[union-attr] messages[-1]["content"].extend( # type: ignore[union-attr]
[ [
ServerToolUseBlockParam( ToolUseBlockParam(
type="server_tool_use",
id=tool_call.id,
name="web_search",
input=tool_call.tool_args,
)
if tool_call.external and tool_call.tool_name == "web_search"
else ToolUseBlockParam(
type="tool_use", type="tool_use",
id=tool_call.id, id=tool_call.id,
name=tool_call.tool_name, name=tool_call.tool_name,
@@ -318,12 +173,10 @@ def _convert_content(
return messages return messages
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place async def _transform_stream(
chat_log: conversation.ChatLog, chat_log: conversation.ChatLog,
stream: AsyncStream[MessageStreamEvent], stream: AsyncStream[MessageStreamEvent],
) -> AsyncGenerator[ ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
"""Transform the response stream into HA format. """Transform the response stream into HA format.
A typical stream of responses might look something like the following: A typical stream of responses might look something like the following:
@@ -356,13 +209,11 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
if stream is None: if stream is None:
raise TypeError("Expected a stream of messages") raise TypeError("Expected a stream of messages")
current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None current_tool_block: ToolUseBlockParam | None = None
current_tool_args: str current_tool_args: str
content_details = ContentDetails()
content_details.add_citation_detail()
input_usage: Usage | None = None input_usage: Usage | None = None
has_content = False
has_native = False has_native = False
first_block: bool
async for response in stream: async for response in stream:
LOGGER.debug("Received response: %s", response) LOGGER.debug("Received response: %s", response)
@@ -371,7 +222,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
if response.message.role != "assistant": if response.message.role != "assistant":
raise ValueError("Unexpected message role") raise ValueError("Unexpected message role")
input_usage = response.message.usage input_usage = response.message.usage
first_block = True
elif isinstance(response, RawContentBlockStartEvent): elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock): if isinstance(response.content_block, ToolUseBlock):
current_tool_block = ToolUseBlockParam( current_tool_block = ToolUseBlockParam(
@@ -382,37 +232,17 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
) )
current_tool_args = "" current_tool_args = ""
elif isinstance(response.content_block, TextBlock): elif isinstance(response.content_block, TextBlock):
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead. if has_content:
first_block
or (
not content_details.has_citations()
and response.content_block.citations is None
and content_details.has_content()
)
):
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
yield {"role": "assistant"} yield {"role": "assistant"}
has_native = False has_native = False
first_block = False has_content = True
content_details.add_citation_detail()
if response.content_block.text: if response.content_block.text:
content_details.citation_details[-1].length += len(
response.content_block.text
)
yield {"content": response.content_block.text} yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock): elif isinstance(response.content_block, ThinkingBlock):
if first_block or has_native: if has_native:
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {"role": "assistant"} yield {"role": "assistant"}
has_native = False has_native = False
first_block = False has_content = False
elif isinstance(response.content_block, RedactedThinkingBlock): elif isinstance(response.content_block, RedactedThinkingBlock):
LOGGER.debug( LOGGER.debug(
"Some of Claudes internal reasoning has been automatically " "Some of Claudes internal reasoning has been automatically "
@@ -420,60 +250,15 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
"responses" "responses"
) )
if has_native: if has_native:
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {"role": "assistant"} yield {"role": "assistant"}
has_native = False has_native = False
first_block = False has_content = False
yield {"native": response.content_block} yield {"native": response.content_block}
has_native = True has_native = True
elif isinstance(response.content_block, ServerToolUseBlock):
current_tool_block = ServerToolUseBlockParam(
type="server_tool_use",
id=response.content_block.id,
name=response.content_block.name,
input="",
)
current_tool_args = ""
elif isinstance(response.content_block, WebSearchToolResultBlock):
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {
"role": "tool_result",
"tool_call_id": response.content_block.tool_use_id,
"tool_name": "web_search",
"tool_result": {
"type": "web_search_tool_result_error",
"error_code": response.content_block.content.error_code,
}
if isinstance(
response.content_block.content, WebSearchToolResultError
)
else {
"content": [
{
"type": "web_search_result",
"encrypted_content": block.encrypted_content,
"page_age": block.page_age,
"title": block.title,
"url": block.url,
}
for block in response.content_block.content
]
},
}
first_block = True
elif isinstance(response, RawContentBlockDeltaEvent): elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta): if isinstance(response.delta, InputJSONDelta):
current_tool_args += response.delta.partial_json current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta): elif isinstance(response.delta, TextDelta):
content_details.citation_details[-1].length += len(response.delta.text)
yield {"content": response.delta.text} yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta): elif isinstance(response.delta, ThinkingDelta):
yield {"thinking_content": response.delta.thinking} yield {"thinking_content": response.delta.thinking}
@@ -486,8 +271,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
) )
} }
has_native = True has_native = True
elif isinstance(response.delta, CitationsDelta):
content_details.add_citation(response.delta.citation)
elif isinstance(response, RawContentBlockStopEvent): elif isinstance(response, RawContentBlockStopEvent):
if current_tool_block is not None: if current_tool_block is not None:
tool_args = json.loads(current_tool_args) if current_tool_args else {} tool_args = json.loads(current_tool_args) if current_tool_args else {}
@@ -498,7 +281,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
id=current_tool_block["id"], id=current_tool_block["id"],
tool_name=current_tool_block["name"], tool_name=current_tool_block["name"],
tool_args=tool_args, tool_args=tool_args,
external=current_tool_block["type"] == "server_tool_use",
) )
] ]
} }
@@ -508,12 +290,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
chat_log.async_trace(_create_token_stats(input_usage, usage)) chat_log.async_trace(_create_token_stats(input_usage, usage))
if response.delta.stop_reason == "refusal": if response.delta.stop_reason == "refusal":
raise HomeAssistantError("Potential policy violation detected") raise HomeAssistantError("Potential policy violation detected")
elif isinstance(response, RawMessageStopEvent):
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
def _create_token_stats( def _create_token_stats(
@@ -561,11 +337,21 @@ class AnthropicBaseLLMEntity(Entity):
"""Generate an answer for the chat log.""" """Generate an answer for the chat log."""
options = self.subentry.data options = self.subentry.data
tools: list[ToolParam] | None = None
if chat_log.llm_api:
tools = [
_format_tool(tool, chat_log.llm_api.custom_serializer)
for tool in chat_log.llm_api.tools
]
system = chat_log.content[0] system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent): if not isinstance(system, conversation.SystemContent):
raise TypeError("First message must be a system message") raise TypeError("First message must be a system message")
messages = _convert_content(chat_log.content[1:]) messages = _convert_content(chat_log.content[1:])
client = self.entry.runtime_data
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_args = MessageCreateParamsStreaming( model_args = MessageCreateParamsStreaming(
@@ -575,10 +361,10 @@ class AnthropicBaseLLMEntity(Entity):
system=system.content, system=system.content,
stream=True, stream=True,
) )
if tools:
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) model_args["tools"] = tools
if ( if (
not model.startswith(tuple(NON_THINKING_MODELS)) model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET and thinking_budget >= MIN_THINKING_BUDGET
): ):
model_args["thinking"] = ThinkingConfigEnabledParam( model_args["thinking"] = ThinkingConfigEnabledParam(
@@ -590,34 +376,6 @@ class AnthropicBaseLLMEntity(Entity):
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
) )
tools: list[ToolUnionParam] = []
if chat_log.llm_api:
tools = [
_format_tool(tool, chat_log.llm_api.custom_serializer)
for tool in chat_log.llm_api.tools
]
if options.get(CONF_WEB_SEARCH):
web_search = WebSearchTool20250305Param(
name="web_search",
type="web_search_20250305",
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
)
if options.get(CONF_WEB_SEARCH_USER_LOCATION):
web_search["user_location"] = {
"type": "approximate",
"city": options.get(CONF_WEB_SEARCH_CITY, ""),
"region": options.get(CONF_WEB_SEARCH_REGION, ""),
"country": options.get(CONF_WEB_SEARCH_COUNTRY, ""),
"timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""),
}
tools.append(web_search)
if tools:
model_args["tools"] = tools
client = self.entry.runtime_data
# To prevent infinite loops, we limit the number of iterations # To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS): for _iteration in range(MAX_TOOL_ITERATIONS):
try: try:
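
The extended-thinking gate in the hunk above switches between an allow-list (THINKING_MODELS) and a deny-list (NON_THINKING_MODELS) of model-name prefixes. A minimal sketch of the deny-list form, with the constants copied inline only to keep the snippet self-contained:

MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = ["claude-3-5", "claude-3-opus", "claude-3-haiku"]

def thinking_enabled(model: str, thinking_budget: int) -> bool:
    # Thinking is enabled unless the model belongs to a known non-thinking
    # family and only when the configured budget meets the minimum.
    return (
        not model.startswith(tuple(NON_THINKING_MODELS))
        and thinking_budget >= MIN_THINKING_BUDGET
    )

print(thinking_enabled("claude-sonnet-4-0", 2048))        # True
print(thinking_enabled("claude-3-5-haiku-latest", 2048))  # False
print(thinking_enabled("claude-sonnet-4-0", 0))           # False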

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic", "documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service", "integration_type": "service",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"requirements": ["anthropic==0.69.0"] "requirements": ["anthropic==0.62.0"]
} }

View File

@@ -35,17 +35,11 @@
"temperature": "Temperature", "temperature": "Temperature",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"recommended": "Recommended model settings", "recommended": "Recommended model settings",
"thinking_budget": "Thinking budget", "thinking_budget_tokens": "Thinking budget"
"web_search": "Enable web search",
"web_search_max_uses": "Maximum web searches",
"user_location": "Include home location"
}, },
"data_description": { "data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template.", "prompt": "Instruct how the LLM should respond. This can be a template.",
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.", "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
"web_search_max_uses": "Limit the number of searches performed per response",
"user_location": "Localize search results based on home location"
} }
} }
}, },
@@ -54,8 +48,7 @@
"entry_not_loaded": "Cannot add things while the configuration is disabled." "entry_not_loaded": "Cannot add things while the configuration is disabled."
}, },
"error": { "error": {
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.", "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
"web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
} }
} }
} }

View File

@@ -7,8 +7,6 @@ from typing import Any
from pyaprilaire.const import Attribute from pyaprilaire.const import Attribute
from homeassistant.components.climate import ( from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
FAN_AUTO, FAN_AUTO,
FAN_ON, FAN_ON,
PRESET_AWAY, PRESET_AWAY,
@@ -18,12 +16,7 @@ from homeassistant.components.climate import (
HVACAction, HVACAction,
HVACMode, HVACMode,
) )
from homeassistant.const import ( from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -239,15 +232,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
cool_setpoint = 0 cool_setpoint = 0
heat_setpoint = 0 heat_setpoint = 0
if temperature := kwargs.get(ATTR_TEMPERATURE): if temperature := kwargs.get("temperature"):
if self.coordinator.data.get(Attribute.MODE) == 3: if self.coordinator.data.get(Attribute.MODE) == 3:
cool_setpoint = temperature cool_setpoint = temperature
else: else:
heat_setpoint = temperature heat_setpoint = temperature
else: else:
if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW): if target_temp_low := kwargs.get("target_temp_low"):
heat_setpoint = target_temp_low heat_setpoint = target_temp_low
if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH): if target_temp_high := kwargs.get("target_temp_high"):
cool_setpoint = target_temp_high cool_setpoint = target_temp_high
if cool_setpoint == 0 and heat_setpoint == 0: if cool_setpoint == 0 and heat_setpoint == 0:
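
Both this Aprilaire hunk and the Bryant Evolution hunk later in the diff differ only in whether the service-call kwargs are read via string literals or the shared ATTR_* constants, which resolve to the same keys. A standalone sketch of the constant-based form, with the constants redefined locally so the snippet runs without Home Assistant installed:

from typing import Any

ATTR_TEMPERATURE = "temperature"            # homeassistant.const
ATTR_TARGET_TEMP_HIGH = "target_temp_high"  # homeassistant.components.climate
ATTR_TARGET_TEMP_LOW = "target_temp_low"    # homeassistant.components.climate

def split_setpoints(**kwargs: Any) -> tuple[float, float]:
    """Return (heat_setpoint, cool_setpoint) from service-call kwargs."""
    heat_setpoint = 0.0
    cool_setpoint = 0.0
    if (temperature := kwargs.get(ATTR_TEMPERATURE)) is not None:
        # Single-setpoint call; which setpoint it maps to is up to the caller.
        heat_setpoint = temperature
    else:
        if (low := kwargs.get(ATTR_TARGET_TEMP_LOW)) is not None:
            heat_setpoint = low
        if (high := kwargs.get(ATTR_TARGET_TEMP_HIGH)) is not None:
            cool_setpoint = high
    return heat_setpoint, cool_setpoint

print(split_setpoints(target_temp_low=68, target_temp_high=75))  # (68, 75)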

View File

@@ -2,7 +2,9 @@
from __future__ import annotations from __future__ import annotations
from typing import Any from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = { TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes", "wan_rx": "sensor_rx_bytes",
@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")} return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T: def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists.""" """Translate raw data to legacy format for dicts and lists."""
if raw is None: if raw is None:
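
The two signatures of translate_to_legacy shown above are equivalent: one declares a constrained module-level TypeVar, the other uses the inline type-parameter syntax from PEP 695 (Python 3.12+). A small sketch of both forms side by side, with hypothetical function names:

from typing import Any, TypeVar

LegacyT = TypeVar("LegacyT", dict[str, Any], list[Any], None)

def old_style(raw: LegacyT) -> LegacyT:
    # Constrained TypeVar declared separately at module level.
    return raw

def new_style[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    # PEP 695 (Python 3.12+): the constrained type parameter is declared inline.
    return raw

print(new_style({"wan_rx": 1}))  # {'wan_rx': 1}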

View File

@@ -5,5 +5,5 @@
"config_flow": true, "config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/autarco", "documentation": "https://www.home-assistant.io/integrations/autarco",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"requirements": ["autarco==3.2.0"] "requirements": ["autarco==3.1.0"]
} }

View File

@@ -26,6 +26,9 @@ async def async_setup_entry(
if CONF_HOST in config_entry.data: if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session) coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else: else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session) coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -33,11 +36,6 @@ async def async_setup_entry(
config_entry.runtime_data = coordinator config_entry.runtime_data = coordinator
if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True return True

View File

@@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame from homeassistant.helpers import frame
from homeassistant.util import slugify from homeassistant.util import slugify
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
from . import util from . import util
from .agent import BackupAgent from .agent import BackupAgent
@@ -145,7 +144,7 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND) return Response(status=HTTPStatus.NOT_FOUND)
else: else:
stream = await agent.async_download_backup(backup_id) stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream)) reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
worker_done_event = asyncio.Event() worker_done_event = asyncio.Event()
@@ -153,7 +152,7 @@ class DownloadBackupView(HomeAssistantView):
"""Call by the worker thread when it's done.""" """Call by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set) hass.loop.call_soon_threadsafe(worker_done_event.set)
stream = AsyncIteratorWriter(hass.loop) stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread( worker = threading.Thread(
target=util.decrypt_backup, target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []], args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,7 +38,6 @@ from homeassistant.helpers import (
) )
from homeassistant.helpers.json import json_bytes from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util from homeassistant.util import dt as dt_util, json as json_util
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import util as backup_util from . import util as backup_util
from .agent import ( from .agent import (
@@ -73,6 +72,7 @@ from .models import (
) )
from .store import BackupStore from .store import BackupStore
from .util import ( from .util import (
AsyncIteratorReader,
DecryptedBackupStreamer, DecryptedBackupStreamer,
EncryptedBackupStreamer, EncryptedBackupStreamer,
make_backup_dir, make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb") reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
else: else:
backup_stream = await agent.async_download_backup(backup_id) backup_stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream)) reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
try: try:
await self.hass.async_add_executor_job( await self.hass.async_add_executor_job(
validate_password_stream, reader, password validate_password_stream, reader, password

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import asyncio import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine from collections.abc import AsyncIterator, Callable, Coroutine
from concurrent.futures import CancelledError, Future
import copy import copy
from dataclasses import dataclass, replace from dataclasses import dataclass, replace
from io import BytesIO from io import BytesIO
@@ -13,7 +14,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue from queue import SimpleQueue
import tarfile import tarfile
import threading import threading
from typing import IO, Any, cast from typing import IO, Any, Self, cast
import aiohttp import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util from homeassistant.util import dt as dt_util
from homeassistant.util.async_iterator import (
Abort,
AsyncIteratorReader,
AsyncIteratorWriter,
)
from homeassistant.util.json import JsonObjectType, json_loads_object from homeassistant.util.json import JsonObjectType, json_loads_object
from .const import BUF_SIZE, LOGGER from .const import BUF_SIZE, LOGGER
@@ -63,6 +59,12 @@ class BackupEmpty(DecryptError):
_message = "No tar files found in the backup." _message = "No tar files found in the backup."
class AbortCipher(HomeAssistantError):
"""Abort the cipher operation."""
_message = "Abort cipher operation."
def make_backup_dir(path: Path) -> None: def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist.""" """Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True) path.mkdir(exist_ok=True)
@@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool:
return False return False
class AsyncIteratorReader:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
def abort(self) -> None:
"""Abort the reader."""
self._aborted = True
if self._next_future is not None:
self._next_future.cancel()
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
)
if self._aborted:
self._next_future.cancel()
raise AbortCipher
try:
self._buffer = self._next_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)
def close(self) -> None:
"""Close the iterator."""
class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
return self
async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration
def abort(self) -> None:
"""Abort the writer."""
self._aborted = True
if self._write_future is not None:
self._write_future.cancel()
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
self._write_future = asyncio.run_coroutine_threadsafe(
self._queue.put(s), self._hass.loop
)
if self._aborted:
self._write_future.cancel()
raise AbortCipher
try:
self._write_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos += len(s)
return len(s)
def validate_password_stream( def validate_password_stream(
input_stream: IO[bytes], input_stream: IO[bytes],
password: str | None, password: str | None,
@@ -240,7 +342,7 @@ def decrypt_backup(
finally: finally:
# Write an empty chunk to signal the end of the stream # Write an empty chunk to signal the end of the stream
output_stream.write(b"") output_stream.write(b"")
except Abort: except AbortCipher:
LOGGER.debug("Cipher operation aborted") LOGGER.debug("Cipher operation aborted")
finally: finally:
on_done(error) on_done(error)
@@ -328,7 +430,7 @@ def encrypt_backup(
finally: finally:
# Write an empty chunk to signal the end of the stream # Write an empty chunk to signal the end of the stream
output_stream.write(b"") output_stream.write(b"")
except Abort: except AbortCipher:
LOGGER.debug("Cipher operation aborted") LOGGER.debug("Cipher operation aborted")
finally: finally:
on_done(error) on_done(error)
@@ -455,8 +557,8 @@ class _CipherBackupStreamer:
self._hass.loop.call_soon_threadsafe(worker_status.done.set) self._hass.loop.call_soon_threadsafe(worker_status.done.set)
stream = await self._open_stream() stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass.loop, stream) reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass.loop) writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread( worker = threading.Thread(
target=self._cipher_func, target=self._cipher_func,
args=[ args=[
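
The reader/writer classes in this hunk bridge an asyncio stream to a blocking file-like interface for a worker thread by scheduling coroutines on the running event loop with run_coroutine_threadsafe. A minimal, self-contained illustration of the reader direction of that idea (not the homeassistant.util.async_iterator implementation):

import asyncio

async def chunk_source():
    for chunk in (b"hello ", b"from ", b"async land"):
        yield chunk

async def _next(stream):
    # Wrap anext() in a coroutine so it can be scheduled with
    # run_coroutine_threadsafe from the worker thread.
    return await anext(stream, None)

def blocking_reader(loop: asyncio.AbstractEventLoop, stream) -> bytes:
    # Runs in a worker thread: each chunk is fetched by scheduling a
    # coroutine on the event loop and blocking on the returned future.
    result = bytearray()
    while True:
        chunk = asyncio.run_coroutine_threadsafe(_next(stream), loop).result()
        if chunk is None:
            break  # the async iterator is exhausted
        result.extend(chunk)
    return bytes(result)

async def main() -> None:
    loop = asyncio.get_running_loop()
    stream = chunk_source()
    data = await asyncio.to_thread(blocking_reader, loop, stream)
    print(data)  # b'hello from async land'

asyncio.run(main())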

View File

@@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
# Add the websocket and API client # Add the websocket and API client
entry.runtime_data = BangOlufsenData(websocket, client) entry.runtime_data = BangOlufsenData(websocket, client)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) # Start WebSocket connection
# Start WebSocket connection once the platforms have been loaded.
# This ensures that the initial WebSocket notifications are dispatched to entities
await client.connect_notifications(remote_control=True, reconnect=True) await client.connect_notifications(remote_control=True, reconnect=True)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True return True

View File

@@ -125,8 +125,7 @@ async def async_setup_entry(
async_add_entities( async_add_entities(
new_entities=[ new_entities=[
BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
], ]
update_before_add=True,
) )
# Register actions. # Register actions.
@@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._software_status.software_version, self._software_status.software_version,
) )
# Get overall device state once. This is handled by WebSocket events the rest of the time.
product_state = await self._client.get_product_state()
# Get volume information.
if product_state.volume:
self._volume = product_state.volume
# Get all playback information.
# Ensure that the metadata is not None upon startup
if product_state.playback:
if product_state.playback.metadata:
self._playback_metadata = product_state.playback.metadata
self._remote_leader = product_state.playback.metadata.remote_leader
if product_state.playback.progress:
self._playback_progress = product_state.playback.progress
if product_state.playback.source:
self._source_change = product_state.playback.source
if product_state.playback.state:
self._playback_state = product_state.playback.state
# Set initial state
if self._playback_state.value:
self._state = self._playback_state.value
self._attr_media_position_updated_at = utcnow() self._attr_media_position_updated_at = utcnow()
# Get the highest resolution available of the given images.
self._media_image = get_highest_resolution_artwork(self._playback_metadata)
# If the device has been updated with new sources, then the API will fail here. # If the device has been updated with new sources, then the API will fail here.
await self._async_update_sources() await self._async_update_sources()

View File

@@ -272,13 +272,6 @@ async def async_setup_entry(
observations: list[ConfigType] = [ observations: list[ConfigType] = [
dict(subentry.data) for subentry in config_entry.subentries.values() dict(subentry.data) for subentry in config_entry.subentries.values()
] ]
for observation in observations:
if observation[CONF_PLATFORM] == CONF_TEMPLATE:
observation[CONF_VALUE_TEMPLATE] = Template(
observation[CONF_VALUE_TEMPLATE], hass
)
prior: float = config[CONF_PRIOR] prior: float = config[CONF_PRIOR]
probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD] probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)

View File

@@ -19,8 +19,8 @@
"bleak-retry-connector==4.4.3", "bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0", "bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3", "bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.3", "bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.5", "dbus-fast==2.44.3",
"habluetooth==5.7.0" "habluetooth==5.6.4"
] ]
} }

View File

@@ -8,7 +8,7 @@
"integration_type": "device", "integration_type": "device",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"requirements": ["brother==5.1.1"], "requirements": ["brother==5.1.0"],
"zeroconf": [ "zeroconf": [
{ {
"type": "_printer._tcp.local.", "type": "_printer._tcp.local.",

View File

@@ -7,14 +7,12 @@ from typing import Any
from evolutionhttp import BryantEvolutionLocalClient from evolutionhttp import BryantEvolutionLocalClient
from homeassistant.components.climate import ( from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ClimateEntity, ClimateEntity,
ClimateEntityFeature, ClimateEntityFeature,
HVACAction, HVACAction,
HVACMode, HVACMode,
) )
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.device_registry import DeviceInfo
@@ -210,24 +208,24 @@ class BryantEvolutionClimate(ClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None: async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature.""" """Set new target temperature."""
if value := kwargs.get(ATTR_TARGET_TEMP_HIGH): if kwargs.get("target_temp_high"):
temp = int(value) temp = int(kwargs["target_temp_high"])
if not await self._client.set_cooling_setpoint(temp): if not await self._client.set_cooling_setpoint(temp):
raise HomeAssistantError( raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_clsp" translation_domain=DOMAIN, translation_key="failed_to_set_clsp"
) )
self._attr_target_temperature_high = temp self._attr_target_temperature_high = temp
if value := kwargs.get(ATTR_TARGET_TEMP_LOW): if kwargs.get("target_temp_low"):
temp = int(value) temp = int(kwargs["target_temp_low"])
if not await self._client.set_heating_setpoint(temp): if not await self._client.set_heating_setpoint(temp):
raise HomeAssistantError( raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_htsp" translation_domain=DOMAIN, translation_key="failed_to_set_htsp"
) )
self._attr_target_temperature_low = temp self._attr_target_temperature_low = temp
if value := kwargs.get(ATTR_TEMPERATURE): if kwargs.get("temperature"):
temp = int(value) temp = int(kwargs["temperature"])
fn = ( fn = (
self._client.set_heating_setpoint self._client.set_heating_setpoint
if self.hvac_mode == HVACMode.HEAT if self.hvac_mode == HVACMode.HEAT

View File

@@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.register_view(CalendarListView(component)) hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component)) hass.http.register_view(CalendarEventView(component))
frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar") frontend.async_register_built_in_panel(
hass, "calendar", "calendar", "hass:calendar"
)
websocket_api.async_register_command(hass, handle_calendar_event_create) websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete) websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -169,7 +169,7 @@ class CalendarEventListener:
def __init__( def __init__(
self, self,
hass: HomeAssistant, hass: HomeAssistant,
job: HassJob[..., Coroutine[Any, Any, None] | Any], job: HassJob[..., Coroutine[Any, Any, None]],
trigger_data: dict[str, Any], trigger_data: dict[str, Any],
fetcher: QueuedEventFetcher, fetcher: QueuedEventFetcher,
) -> None: ) -> None:

View File

@@ -51,6 +51,12 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.event import async_track_time_interval
@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player" ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format" ATTR_FORMAT: Final = "format"
# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
class CameraEntityFeature(IntFlag): class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity.""" """Supported features of the camera entity."""
@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION], duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK], lookback=service_call.data[CONF_LOOKBACK],
) )
# These can be removed if no deprecated constants are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
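The `__getattr__`/`__dir__`/`__all__` trio above is wired through `homeassistant.helpers.deprecation`. The snippet below is only a generic, self-contained sketch of the underlying PEP 562 mechanism (Python 3.11+ for StrEnum), not those helpers.

from enum import StrEnum
import warnings

class CameraState(StrEnum):
    RECORDING = "recording"
    STREAMING = "streaming"
    IDLE = "idle"

_DEPRECATED = {
    "STATE_RECORDING": CameraState.RECORDING,
    "STATE_STREAMING": CameraState.STREAMING,
    "STATE_IDLE": CameraState.IDLE,
}

def __getattr__(name: str) -> CameraState:
    """Resolve deprecated module constants lazily, warning on access (PEP 562)."""
    if name in _DEPRECATED:
        warnings.warn(
            f"{name} is deprecated, use the CameraState enum instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return _DEPRECATED[name]
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

def __dir__() -> list[str]:
    """Advertise the deprecated names next to the regular module contents."""
    return [*globals(), *_DEPRECATED]

# Importing this module elsewhere and reading `module.STATE_IDLE` emits the
# DeprecationWarning while still returning a usable CameraState value.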

View File

@@ -4,6 +4,5 @@
"codeowners": [], "codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/citybikes", "documentation": "https://www.home-assistant.io/integrations/citybikes",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"quality_scale": "legacy", "quality_scale": "legacy"
"requirements": ["python-citybikes==0.3.3"]
} }

View File

@@ -5,11 +5,8 @@ from __future__ import annotations
import asyncio import asyncio
from datetime import timedelta from datetime import timedelta
import logging import logging
import sys
import aiohttp import aiohttp
from citybikes import __version__ as CITYBIKES_CLIENT_VERSION
from citybikes.asyncio import Client as CitybikesClient
import voluptuous as vol import voluptuous as vol
from homeassistant.components.sensor import ( from homeassistant.components.sensor import (
@@ -18,18 +15,21 @@ from homeassistant.components.sensor import (
SensorEntity, SensorEntity,
) )
from homeassistant.const import ( from homeassistant.const import (
APPLICATION_NAME, ATTR_ID,
ATTR_LATITUDE,
ATTR_LOCATION,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_LATITUDE, CONF_LATITUDE,
CONF_LONGITUDE, CONF_LONGITUDE,
CONF_NAME, CONF_NAME,
CONF_RADIUS, CONF_RADIUS,
EVENT_HOMEASSISTANT_CLOSE,
UnitOfLength, UnitOfLength,
__version__,
) )
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.event import async_track_time_interval
@@ -40,33 +40,31 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
HA_USER_AGENT = (
f"{APPLICATION_NAME}/{__version__} "
f"python-citybikes/{CITYBIKES_CLIENT_VERSION} "
f"Python/{sys.version_info[0]}.{sys.version_info[1]}"
)
ATTR_UID = "uid"
ATTR_LATITUDE = "latitude"
ATTR_LONGITUDE = "longitude"
ATTR_EMPTY_SLOTS = "empty_slots" ATTR_EMPTY_SLOTS = "empty_slots"
ATTR_EXTRA = "extra"
ATTR_FREE_BIKES = "free_bikes"
ATTR_NETWORK = "network"
ATTR_NETWORKS_LIST = "networks"
ATTR_STATIONS_LIST = "stations"
ATTR_TIMESTAMP = "timestamp" ATTR_TIMESTAMP = "timestamp"
ATTR_UID = "uid"
CONF_NETWORK = "network" CONF_NETWORK = "network"
CONF_STATIONS_LIST = "stations" CONF_STATIONS_LIST = "stations"
DEFAULT_ENDPOINT = "https://api.citybik.es/{uri}"
PLATFORM = "citybikes" PLATFORM = "citybikes"
MONITORED_NETWORKS = "monitored-networks" MONITORED_NETWORKS = "monitored-networks"
DATA_CLIENT = "client"
NETWORKS_URI = "v2/networks" NETWORKS_URI = "v2/networks"
REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=5) REQUEST_TIMEOUT = 5 # In seconds; argument to asyncio.timeout
SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API
STATIONS_URI = "v2/networks/{uid}?fields=network.stations"
CITYBIKES_ATTRIBUTION = ( CITYBIKES_ATTRIBUTION = (
"Information provided by the CityBikes Project (https://citybik.es/#about)" "Information provided by the CityBikes Project (https://citybik.es/#about)"
) )
@@ -89,6 +87,72 @@ PLATFORM_SCHEMA = vol.All(
), ),
) )
NETWORK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_LOCATION): vol.Schema(
{
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
},
extra=vol.REMOVE_EXTRA,
),
},
extra=vol.REMOVE_EXTRA,
)
NETWORKS_RESPONSE_SCHEMA = vol.Schema(
{vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA]}
)
STATION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_FREE_BIKES): cv.positive_int,
vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None),
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_TIMESTAMP): cv.string,
vol.Optional(ATTR_EXTRA): vol.Schema(
{vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA
),
},
extra=vol.REMOVE_EXTRA,
)
STATIONS_RESPONSE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NETWORK): vol.Schema(
{vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]}, extra=vol.REMOVE_EXTRA
)
}
)
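A standalone sketch (plain voluptuous, simplified keys) of how the response schemas above behave: `extra=vol.REMOVE_EXTRA` silently drops unknown keys, and a missing required field raises `vol.Invalid`, which the request helper that follows converts into `CityBikesRequestError`.

import voluptuous as vol

STATION = vol.Schema(
    {
        vol.Required("id"): str,
        vol.Required("free_bikes"): int,
        vol.Optional("extra"): vol.Schema(
            {vol.Optional("uid"): str}, extra=vol.REMOVE_EXTRA
        ),
    },
    extra=vol.REMOVE_EXTRA,
)

payload = {
    "id": "st-1",
    "free_bikes": 4,
    "company": "ignored",
    "extra": {"uid": "42", "slots": 10},
}
print(STATION(payload))   # {'id': 'st-1', 'free_bikes': 4, 'extra': {'uid': '42'}}

try:
    STATION({"id": "st-2"})
except vol.Invalid as err:
    print("rejected:", err)  # required key not provided @ data['free_bikes']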
class CityBikesRequestError(Exception):
"""Error to indicate a CityBikes API request has failed."""
async def async_citybikes_request(hass, uri, schema):
"""Perform a request to CityBikes API endpoint, and parse the response."""
try:
session = async_get_clientsession(hass)
async with asyncio.timeout(REQUEST_TIMEOUT):
req = await session.get(DEFAULT_ENDPOINT.format(uri=uri))
json_response = await req.json()
return schema(json_response)
except (TimeoutError, aiohttp.ClientError):
_LOGGER.error("Could not connect to CityBikes API endpoint")
except ValueError:
_LOGGER.error("Received non-JSON data from CityBikes API endpoint")
except vol.Invalid as err:
_LOGGER.error("Received unexpected JSON from CityBikes API endpoint: %s", err)
raise CityBikesRequestError
async def async_setup_platform( async def async_setup_platform(
hass: HomeAssistant, hass: HomeAssistant,
@@ -111,14 +175,6 @@ async def async_setup_platform(
radius, UnitOfLength.FEET, UnitOfLength.METERS radius, UnitOfLength.FEET, UnitOfLength.METERS
) )
client = CitybikesClient(user_agent=HA_USER_AGENT, timeout=REQUEST_TIMEOUT)
hass.data[PLATFORM][DATA_CLIENT] = client
async def _async_close_client(event):
await client.close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_client)
# Create a single instance of CityBikesNetworks. # Create a single instance of CityBikesNetworks.
networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass)) networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass))
@@ -138,10 +194,10 @@ async def async_setup_platform(
devices = [] devices = []
for station in network.stations: for station in network.stations:
dist = location_util.distance( dist = location_util.distance(
latitude, longitude, station.latitude, station.longitude latitude, longitude, station[ATTR_LATITUDE], station[ATTR_LONGITUDE]
) )
station_id = station.id station_id = station[ATTR_ID]
station_uid = str(station.extra.get(ATTR_UID, "")) station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, ""))
if radius > dist or stations_list.intersection((station_id, station_uid)): if radius > dist or stations_list.intersection((station_id, station_uid)):
if name: if name:
@@ -160,7 +216,6 @@ class CityBikesNetworks:
def __init__(self, hass): def __init__(self, hass):
"""Initialize the networks instance.""" """Initialize the networks instance."""
self.hass = hass self.hass = hass
self.client = hass.data[PLATFORM][DATA_CLIENT]
self.networks = None self.networks = None
self.networks_loading = asyncio.Condition() self.networks_loading = asyncio.Condition()
@@ -169,21 +224,24 @@ class CityBikesNetworks:
try: try:
await self.networks_loading.acquire() await self.networks_loading.acquire()
if self.networks is None: if self.networks is None:
self.networks = await self.client.networks.fetch() networks = await async_citybikes_request(
except aiohttp.ClientError as err: self.hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA
)
self.networks = networks[ATTR_NETWORKS_LIST]
except CityBikesRequestError as err:
raise PlatformNotReady from err raise PlatformNotReady from err
else: else:
result = None result = None
minimum_dist = None minimum_dist = None
for network in self.networks: for network in self.networks:
network_latitude = network.location.latitude network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE]
network_longitude = network.location.longitude network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE]
dist = location_util.distance( dist = location_util.distance(
latitude, longitude, network_latitude, network_longitude latitude, longitude, network_latitude, network_longitude
) )
if minimum_dist is None or dist < minimum_dist: if minimum_dist is None or dist < minimum_dist:
minimum_dist = dist minimum_dist = dist
result = network.id result = network[ATTR_ID]
return result return result
finally: finally:
@@ -199,20 +257,22 @@ class CityBikesNetwork:
self.network_id = network_id self.network_id = network_id
self.stations = [] self.stations = []
self.ready = asyncio.Event() self.ready = asyncio.Event()
self.client = hass.data[PLATFORM][DATA_CLIENT]
async def async_refresh(self, now=None): async def async_refresh(self, now=None):
"""Refresh the state of the network.""" """Refresh the state of the network."""
try: try:
network = await self.client.network(uid=self.network_id).fetch() network = await async_citybikes_request(
except aiohttp.ClientError as err: self.hass,
if now is None: STATIONS_URI.format(uid=self.network_id),
STATIONS_RESPONSE_SCHEMA,
)
self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST]
self.ready.set()
except CityBikesRequestError as err:
if now is not None:
self.ready.clear()
else:
raise PlatformNotReady from err raise PlatformNotReady from err
self.ready.clear()
return
self.stations = network.stations
self.ready.set()
class CityBikesStation(SensorEntity): class CityBikesStation(SensorEntity):
@@ -230,13 +290,16 @@ class CityBikesStation(SensorEntity):
async def async_update(self) -> None: async def async_update(self) -> None:
"""Update station state.""" """Update station state."""
station = next(s for s in self._network.stations if s.id == self._station_id) for station in self._network.stations:
self._attr_name = station.name if station[ATTR_ID] == self._station_id:
self._attr_native_value = station.free_bikes station_data = station
break
self._attr_name = station_data.get(ATTR_NAME)
self._attr_native_value = station_data.get(ATTR_FREE_BIKES)
self._attr_extra_state_attributes = { self._attr_extra_state_attributes = {
ATTR_UID: station.extra.get(ATTR_UID), ATTR_UID: station_data.get(ATTR_EXTRA, {}).get(ATTR_UID),
ATTR_LATITUDE: station.latitude, ATTR_LATITUDE: station_data.get(ATTR_LATITUDE),
ATTR_LONGITUDE: station.longitude, ATTR_LONGITUDE: station_data.get(ATTR_LONGITUDE),
ATTR_EMPTY_SLOTS: station.empty_slots, ATTR_EMPTY_SLOTS: station_data.get(ATTR_EMPTY_SLOTS),
ATTR_TIMESTAMP: station.timestamp, ATTR_TIMESTAMP: station_data.get(ATTR_TIMESTAMP),
} }
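A small standalone sketch of the `next(...)` lookup used in one variant of `async_update` above. Without a default, `next()` raises `StopIteration` when nothing matches, so a fallback (or a guard) is worth keeping in mind if a station can disappear between refreshes.

from dataclasses import dataclass

@dataclass
class Station:
    id: str
    free_bikes: int

stations = [Station("a", 3), Station("b", 0)]

print(next(s for s in stations if s.id == "b"))           # Station(id='b', free_bikes=0)
print(next((s for s in stations if s.id == "zz"), None))  # None instead of StopIteration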

View File

@@ -53,6 +53,7 @@ from .const import (
CONF_ACME_SERVER, CONF_ACME_SERVER,
CONF_ALEXA, CONF_ALEXA,
CONF_ALIASES, CONF_ALIASES,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID, CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG, CONF_ENTITY_CONFIG,
CONF_FILTER, CONF_FILTER,
@@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str, vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str, vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str, vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str, vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str, vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str, vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server" CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server" CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server" CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server" CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server" CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server" CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
"integration_type": "system", "integration_type": "system",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"], "loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.2.0"], "requirements": ["hass-nabucasa==1.1.2"],
"single_config_entry": true "single_config_entry": true
} }

View File

@@ -1,106 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home`; the result is also reused.
Extract `async_setup_entry` into its own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors.
config-flow:
status: todo
comment: |
The config flow is missing data descriptions.
Remove URLs from data descriptions; replace them with placeholders.
Use Electricity Maps zone keys as a dropdown for the country code.
Use a location selector for the coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of `assert await async_setup_component(hass, DOMAIN, {})`.
`test_sensor` could use `snapshot_platform`.
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connects to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .coordinator import ComelitConfigEntry, ComelitVedoSystem from .coordinator import ComelitConfigEntry, ComelitVedoSystem
from .utils import DeviceType, new_device_listener
# Coordinator is used to centralize the data updates # Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@@ -30,19 +29,23 @@ async def async_setup_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: known_devices: set[int] = set()
"""Add entities for new monitors."""
entities = [
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data["alarm_zones"].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload( def _check_device() -> None:
new_device_listener(coordinator, _add_new_entities, "alarm_zones") current_devices = set(coordinator.data["alarm_zones"])
) new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoBinarySensorEntity(
coordinator, device, config_entry.entry_id
)
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitVedoBinarySensorEntity( class ComelitVedoBinarySensorEntity(
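A standalone sketch of the bookkeeping that both variants above implement, the `new_device_listener` helper and the inline `_check_device` closure. Names here are illustrative; the point is that a closure captures the set of already-seen ids and only hands genuinely new ones to the add callback on each coordinator update.

from collections.abc import Callable, Iterable

def new_device_checker(
    current_ids: Callable[[], Iterable[int]],
    add_devices: Callable[[set[int]], None],
) -> Callable[[], None]:
    """Return a listener that adds entities only for ids not seen before."""
    known: set[int] = set()

    def check() -> None:
        if new := set(current_ids()) - known:
            known.update(new)
            add_devices(new)

    return check

zones = {1: "entrance", 2: "garage"}
check = new_device_checker(zones.keys, lambda new: print("adding", sorted(new)))
check()                 # adding [1, 2]
zones[3] = "garden"
check()                 # adding [3]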

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from asyncio.exceptions import TimeoutError from asyncio.exceptions import TimeoutError
from collections.abc import Mapping from collections.abc import Mapping
import re
from typing import Any from typing import Any
from aiocomelit import ( from aiocomelit import (
@@ -28,20 +27,25 @@ from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252" DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111" DEFAULT_PIN = "111111"
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema( USER_SCHEMA = vol.Schema(
{ {
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string, vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST), vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
} }
) )
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string}) STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema( STEP_RECONFIGURE = vol.Schema(
{ {
vol.Required(CONF_HOST): cv.string, vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port, vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string, vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
} }
) )
@@ -51,9 +55,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
api: ComelitCommonApi api: ComelitCommonApi
if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
raise InvalidPin
session = await async_client_session(hass) session = await async_client_session(hass)
if data.get(CONF_TYPE, BRIDGE) == BRIDGE: if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
api = ComeliteSerialBridgeApi( api = ComeliteSerialBridgeApi(
@@ -104,8 +105,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except InvalidAuth: except InvalidAuth:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001 except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception") _LOGGER.exception("Unexpected exception")
errors["base"] = "unknown" errors["base"] = "unknown"
@@ -147,8 +146,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except InvalidAuth: except InvalidAuth:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001 except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception") _LOGGER.exception("Unexpected exception")
errors["base"] = "unknown" errors["base"] = "unknown"
@@ -192,8 +189,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except InvalidAuth: except InvalidAuth:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001 except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception") _LOGGER.exception("Unexpected exception")
errors["base"] = "unknown" errors["base"] = "unknown"
@@ -215,7 +210,3 @@ class CannotConnect(HomeAssistantError):
class InvalidAuth(HomeAssistantError): class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth.""" """Error to indicate there is invalid auth."""
class InvalidPin(HomeAssistantError):
"""Error to indicate an invalid pin."""

View File

@@ -161,7 +161,7 @@ class ComelitSerialBridge(
entry: ComelitConfigEntry, entry: ComelitConfigEntry,
host: str, host: str,
port: int, port: int,
pin: str, pin: int,
session: ClientSession, session: ClientSession,
) -> None: ) -> None:
"""Initialize the scanner.""" """Initialize the scanner."""
@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
entry: ComelitConfigEntry, entry: ComelitConfigEntry,
host: str, host: str,
port: int, port: int,
pin: str, pin: int,
session: ClientSession, session: ClientSession,
) -> None: ) -> None:
"""Initialize the scanner.""" """Initialize the scanner."""

View File

@@ -7,14 +7,14 @@ from typing import Any, cast
from aiocomelit import ComelitSerialBridgeObject from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.restore_state import RestoreEntity
from .coordinator import ComelitConfigEntry, ComelitSerialBridge from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener from .utils import bridge_api_call
# Coordinator is used to centralize the data updates # Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@@ -29,19 +29,21 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: known_devices: set[int] = set()
"""Add entities for new monitors."""
entities = [
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload( def _check_device() -> None:
new_device_listener(coordinator, _add_new_entities, COVER) current_devices = set(coordinator.data[COVER])
) new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@@ -60,6 +62,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
super().__init__(coordinator, device, config_entry_entry_id) super().__init__(coordinator, device, config_entry_entry_id)
# Device doesn't provide a status so we assume UNKNOWN at first startup # Device doesn't provide a status so we assume UNKNOWN at first startup
self._last_action: int | None = None self._last_action: int | None = None
self._last_state: str | None = None
def _current_action(self, action: str) -> bool: def _current_action(self, action: str) -> bool:
"""Return the current cover action.""" """Return the current cover action."""
@@ -95,6 +98,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@bridge_api_call @bridge_api_call
async def _cover_set_state(self, action: int, state: int) -> None: async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state.""" """Set desired cover state."""
self._last_state = self.state
await self.coordinator.api.set_device_status(COVER, self._device.index, action) await self.coordinator.api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state() self.async_write_ha_state()
@@ -120,10 +124,5 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
await super().async_added_to_hass() await super().async_added_to_hass()
if (state := await self.async_get_last_state()) is not None: if last_state := await self.async_get_last_state():
if state.state == CoverState.CLOSED: self._last_state = last_state.state
self._last_action = STATE_COVER.index(CoverState.CLOSING)
if state.state == CoverState.OPEN:
self._last_action = STATE_COVER.index(CoverState.OPENING)
self._attr_is_closed = state.state == CoverState.CLOSED

View File

@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ComelitConfigEntry, ComelitSerialBridge from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener from .utils import bridge_api_call
# Coordinator is used to centralize the data updates # Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@@ -27,19 +27,21 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: known_devices: set[int] = set()
"""Add entities for new monitors."""
entities = [
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload( def _check_device() -> None:
new_device_listener(coordinator, _add_new_entities, LIGHT) current_devices = set(coordinator.data[LIGHT])
) new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity): class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):

View File

@@ -8,5 +8,5 @@
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["aiocomelit"], "loggers": ["aiocomelit"],
"quality_scale": "platinum", "quality_scale": "platinum",
"requirements": ["aiocomelit==1.1.1"] "requirements": ["aiocomelit==0.12.3"]
} }

View File

@@ -20,7 +20,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
from .entity import ComelitBridgeBaseEntity from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, new_device_listener
# Coordinator is used to centralize the data updates # Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@@ -66,22 +65,24 @@ async def async_setup_bridge_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: known_devices: set[int] = set()
"""Add entities for new monitors."""
entities = [
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload( def _check_device() -> None:
new_device_listener(coordinator, _add_new_entities, OTHER) current_devices = set(coordinator.data[OTHER])
) new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[OTHER].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
async def async_setup_vedo_entry( async def async_setup_vedo_entry(
@@ -93,22 +94,24 @@ async def async_setup_vedo_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: known_devices: set[int] = set()
"""Add entities for new monitors."""
entities = [
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload( def _check_device() -> None:
new_device_listener(coordinator, _add_new_entities, "alarm_zones") current_devices = set(coordinator.data["alarm_zones"])
) new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity): class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):

View File

@@ -43,13 +43,11 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
"unknown": "[%key:common::config_flow::error::unknown%]" "unknown": "[%key:common::config_flow::error::unknown%]"
}, },
"error": { "error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
"unknown": "[%key:common::config_flow::error::unknown%]" "unknown": "[%key:common::config_flow::error::unknown%]"
} }
}, },

View File

@@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ComelitConfigEntry, ComelitSerialBridge from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener from .utils import bridge_api_call
# Coordinator is used to centralize the data updates # Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@@ -28,20 +28,35 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None: entities: list[ComelitSwitchEntity] = []
"""Add entities for new monitors.""" entities.extend(
entities = [ ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
ComelitSwitchEntity(coordinator, device, config_entry.entry_id) for device in coordinator.data[IRRIGATION].values()
for device in coordinator.data[dev_type].values() )
if device in new_devices entities.extend(
] ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
if entities: for device in coordinator.data[OTHER].values()
async_add_entities(entities) )
async_add_entities(entities)
for dev_type in (IRRIGATION, OTHER): known_devices: dict[str, set[int]] = {
config_entry.async_on_unload( dev_type: set() for dev_type in (IRRIGATION, OTHER)
new_device_listener(coordinator, _add_new_entities, dev_type) }
)
def _check_device() -> None:
for dev_type in (IRRIGATION, OTHER):
current_devices = set(coordinator.data[dev_type])
new_devices = current_devices - known_devices[dev_type]
if new_devices:
known_devices[dev_type].update(new_devices)
async_add_entities(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity): class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):

View File

@@ -4,11 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps from functools import wraps
from typing import Any, Concatenate from typing import Any, Concatenate
from aiocomelit.api import ( from aiocomelit import ComelitSerialBridgeObject
ComelitSerialBridgeObject,
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession, CookieJar from aiohttp import ClientSession, CookieJar
@@ -23,11 +19,8 @@ from homeassistant.helpers import (
) )
from .const import _LOGGER, DOMAIN from .const import _LOGGER, DOMAIN
from .coordinator import ComelitBaseCoordinator
from .entity import ComelitBridgeBaseEntity from .entity import ComelitBridgeBaseEntity
DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
async def async_client_session(hass: HomeAssistant) -> ClientSession: async def async_client_session(hass: HomeAssistant) -> ClientSession:
"""Return a new aiohttp session.""" """Return a new aiohttp session."""
@@ -120,41 +113,3 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
self.coordinator.config_entry.async_start_reauth(self.hass) self.coordinator.config_entry.async_start_reauth(self.hass)
return cmd_wrapper return cmd_wrapper
def new_device_listener(
coordinator: ComelitBaseCoordinator,
new_devices_callback: Callable[
[
list[
ComelitSerialBridgeObject
| ComelitVedoAreaObject
| ComelitVedoZoneObject
],
str,
],
None,
],
data_type: str,
) -> Callable[[], None]:
"""Subscribe to coordinator updates to check for new devices."""
known_devices: set[int] = set()
def _check_devices() -> None:
"""Check for new devices and call callback with any new monitors."""
if not coordinator.data:
return
new_devices: list[DeviceType] = []
for _id in coordinator.data[data_type]:
if _id not in known_devices:
known_devices.add(_id)
new_devices.append(coordinator.data[data_type][_id])
if new_devices:
new_devices_callback(new_devices, data_type)
# Check for devices immediately
_check_devices()
return coordinator.async_add_listener(_check_devices)

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the config component.""" """Set up the config component."""
frontend.async_register_built_in_panel( frontend.async_register_built_in_panel(
hass, "config", "config", "mdi:cog", require_admin=True hass, "config", "config", "hass:cog", require_admin=True
) )
for panel in SECTIONS: for panel in SECTIONS:

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Callable from collections.abc import Callable
from http import HTTPStatus from http import HTTPStatus
import logging
from typing import Any, NoReturn from typing import Any, NoReturn
from aiohttp import web from aiohttp import web
@@ -24,12 +23,7 @@ from homeassistant.helpers.data_entry_flow import (
FlowManagerResourceView, FlowManagerResourceView,
) )
from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.json import ( from homeassistant.helpers.json import json_fragment
JSON_DUMP,
find_paths_unserializable_data,
json_bytes,
json_fragment,
)
from homeassistant.loader import ( from homeassistant.loader import (
Integration, Integration,
IntegrationNotFound, IntegrationNotFound,
@@ -37,9 +31,6 @@ from homeassistant.loader import (
async_get_integrations, async_get_integrations,
async_get_loaded_integration, async_get_loaded_integration,
) )
from homeassistant.util.json import format_unserializable_data
_LOGGER = logging.getLogger(__name__)
@callback @callback
@@ -411,40 +402,18 @@ def config_entries_flow_subscribe(
connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow( connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
async_on_flow_init_remove async_on_flow_init_remove
) )
try:
serialized_flows = [
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
)
]
except (ValueError, TypeError):
# If we can't serialize, we'll filter out unserializable flows
serialized_flows = []
for flw in hass.config_entries.flow.async_progress():
if flw["context"]["source"] in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
):
continue
try:
serialized_flows.append(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
)
except (ValueError, TypeError):
_LOGGER.error(
"Unable to serialize to JSON. Bad data found at %s",
format_unserializable_data(
find_paths_unserializable_data(flw, dump=JSON_DUMP)
),
)
continue
connection.send_message( connection.send_message(
websocket_api.messages.construct_event_message( websocket_api.event_message(
msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]")) msg["id"],
[
{"type": None, "flow_id": flw["flow_id"], "flow": flw}
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
)
],
) )
) )
connection.send_result(msg["id"]) connection.send_result(msg["id"])
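A standalone sketch of the per-flow serialization strategy shown above, using the stdlib json module in place of Home Assistant's `json_bytes`. Encoding each item on its own lets a single unserializable entry be logged and skipped instead of failing the whole websocket message.

import json

flows = [
    {"flow_id": "a", "step_id": "user"},
    {"flow_id": "b", "data": {1, 2}},  # a set is not JSON serializable
]

chunks: list[bytes] = []
for flow in flows:
    try:
        chunks.append(json.dumps(flow).encode())
    except (TypeError, ValueError):
        print(f"skipping unserializable flow {flow['flow_id']}")

payload = b"[" + b",".join(chunks) + b"]"
print(payload)  # b'[{"flow_id": "a", "step_id": "user"}]'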

View File

@@ -20,13 +20,10 @@ from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType from homeassistant.util.json import JsonObjectType
from . import trace from . import trace
from .const import ChatLogEventType
from .models import ConversationInput, ConversationResult from .models import ConversationInput, ConversationResult
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs") DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
SUBSCRIPTIONS: HassKey[list[Callable[[ChatLogEventType, dict[str, Any]], None]]] = (
HassKey("conversation_chat_log_subscriptions")
)
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
current_chat_log: ContextVar[ChatLog | None] = ContextVar( current_chat_log: ContextVar[ChatLog | None] = ContextVar(
@@ -34,37 +31,6 @@ current_chat_log: ContextVar[ChatLog | None] = ContextVar(
) )
@callback
def async_subscribe_chat_logs(
hass: HomeAssistant,
callback_func: Callable[[ChatLogEventType, dict[str, Any]], None],
) -> Callable[[], None]:
"""Subscribe to all chat logs."""
subscriptions = hass.data.get(SUBSCRIPTIONS)
if subscriptions is None:
subscriptions = []
hass.data[SUBSCRIPTIONS] = subscriptions
subscriptions.append(callback_func)
@callback
def unsubscribe() -> None:
"""Unsubscribe from chat logs."""
subscriptions.remove(callback_func)
return unsubscribe
@callback
def _async_notify_subscribers(
hass: HomeAssistant, event_type: ChatLogEventType, data: dict[str, Any]
) -> None:
"""Notify subscribers of a chat log event."""
if subscriptions := hass.data.get(SUBSCRIPTIONS):
for callback_func in subscriptions:
callback_func(event_type, data)
@contextmanager @contextmanager
def async_get_chat_log( def async_get_chat_log(
hass: HomeAssistant, hass: HomeAssistant,
@@ -97,8 +63,6 @@ def async_get_chat_log(
all_chat_logs = {} all_chat_logs = {}
hass.data[DATA_CHAT_LOGS] = all_chat_logs hass.data[DATA_CHAT_LOGS] = all_chat_logs
is_new_log = session.conversation_id not in all_chat_logs
if chat_log := all_chat_logs.get(session.conversation_id): if chat_log := all_chat_logs.get(session.conversation_id):
chat_log = replace(chat_log, content=chat_log.content.copy()) chat_log = replace(chat_log, content=chat_log.content.copy())
else: else:
@@ -107,12 +71,6 @@ def async_get_chat_log(
if chat_log_delta_listener: if chat_log_delta_listener:
chat_log.delta_listener = chat_log_delta_listener chat_log.delta_listener = chat_log_delta_listener
# Fire CREATED event for new chat logs before any content is added
if is_new_log:
_async_notify_subscribers(
hass, ChatLogEventType.CREATED, {"chat_log": chat_log.as_dict()}
)
if user_input is not None: if user_input is not None:
chat_log.async_add_user_content(UserContent(content=user_input.text)) chat_log.async_add_user_content(UserContent(content=user_input.text))
@@ -126,26 +84,14 @@ def async_get_chat_log(
LOGGER.debug( LOGGER.debug(
"Chat Log opened but no assistant message was added, ignoring update" "Chat Log opened but no assistant message was added, ignoring update"
) )
# If this was a new log but nothing was added, fire DELETED to clean up
if is_new_log:
_async_notify_subscribers(
hass,
ChatLogEventType.DELETED,
{"conversation_id": session.conversation_id},
)
return return
if is_new_log: if session.conversation_id not in all_chat_logs:
@callback @callback
def do_cleanup() -> None: def do_cleanup() -> None:
"""Handle cleanup.""" """Handle cleanup."""
all_chat_logs.pop(session.conversation_id) all_chat_logs.pop(session.conversation_id)
_async_notify_subscribers(
hass,
ChatLogEventType.DELETED,
{"conversation_id": session.conversation_id},
)
session.async_on_cleanup(do_cleanup) session.async_on_cleanup(do_cleanup)
@@ -154,13 +100,6 @@ def async_get_chat_log(
all_chat_logs[session.conversation_id] = chat_log all_chat_logs[session.conversation_id] = chat_log
# For new logs, CREATED was already fired before content was added
# For existing logs, fire UPDATED
if not is_new_log:
_async_notify_subscribers(
hass, ChatLogEventType.UPDATED, {"chat_log": chat_log.as_dict()}
)
class ConverseError(HomeAssistantError): class ConverseError(HomeAssistantError):
"""Error during initialization of conversation. """Error during initialization of conversation.
@@ -191,10 +130,6 @@ class SystemContent:
role: Literal["system"] = field(init=False, default="system") role: Literal["system"] = field(init=False, default="system")
content: str content: str
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
return {"role": self.role, "content": self.content}
@dataclass(frozen=True) @dataclass(frozen=True)
class UserContent: class UserContent:
@@ -204,15 +139,6 @@ class UserContent:
content: str content: str
attachments: list[Attachment] | None = field(default=None) attachments: list[Attachment] | None = field(default=None)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
result: dict[str, Any] = {"role": self.role, "content": self.content}
if self.attachments:
result["attachments"] = [
attachment.as_dict() for attachment in self.attachments
]
return result
@dataclass(frozen=True) @dataclass(frozen=True)
class Attachment: class Attachment:
@@ -227,14 +153,6 @@ class Attachment:
path: Path path: Path
"""Path to the attachment on disk.""" """Path to the attachment on disk."""
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the attachment."""
return {
"media_content_id": self.media_content_id,
"mime_type": self.mime_type,
"path": str(self.path),
}
@dataclass(frozen=True) @dataclass(frozen=True)
class AssistantContent: class AssistantContent:
@@ -247,17 +165,6 @@ class AssistantContent:
tool_calls: list[llm.ToolInput] | None = None tool_calls: list[llm.ToolInput] | None = None
native: Any = None native: Any = None
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
result: dict[str, Any] = {"role": self.role, "agent_id": self.agent_id}
if self.content:
result["content"] = self.content
if self.thinking_content:
result["thinking_content"] = self.thinking_content
if self.tool_calls:
result["tool_calls"] = self.tool_calls
return result
@dataclass(frozen=True) @dataclass(frozen=True)
class ToolResultContent: class ToolResultContent:
@@ -269,16 +176,6 @@ class ToolResultContent:
tool_name: str tool_name: str
tool_result: JsonObjectType tool_result: JsonObjectType
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
return {
"role": self.role,
"agent_id": self.agent_id,
"tool_call_id": self.tool_call_id,
"tool_name": self.tool_name,
"tool_result": self.tool_result,
}
type Content = SystemContent | UserContent | AssistantContent | ToolResultContent type Content = SystemContent | UserContent | AssistantContent | ToolResultContent
@@ -314,13 +211,6 @@ class ChatLog:
delta_listener: Callable[[ChatLog, dict], None] | None = None delta_listener: Callable[[ChatLog, dict], None] | None = None
llm_input_provided_index = 0 llm_input_provided_index = 0
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the chat log."""
return {
"conversation_id": self.conversation_id,
"continue_conversation": self.continue_conversation,
}
@property @property
def continue_conversation(self) -> bool: def continue_conversation(self) -> bool:
"""Return whether the conversation should continue.""" """Return whether the conversation should continue."""
@@ -351,11 +241,6 @@ class ChatLog:
"""Add user content to the log.""" """Add user content to the log."""
LOGGER.debug("Adding user content: %s", content) LOGGER.debug("Adding user content: %s", content)
self.content.append(content) self.content.append(content)
_async_notify_subscribers(
self.hass,
ChatLogEventType.CONTENT_ADDED,
{"conversation_id": self.conversation_id, "content": content.as_dict()},
)
@callback @callback
def async_add_assistant_content_without_tools( def async_add_assistant_content_without_tools(
@@ -374,11 +259,6 @@ class ChatLog:
): ):
raise ValueError("Non-external tool calls not allowed") raise ValueError("Non-external tool calls not allowed")
self.content.append(content) self.content.append(content)
_async_notify_subscribers(
self.hass,
ChatLogEventType.CONTENT_ADDED,
{"conversation_id": self.conversation_id, "content": content.as_dict()},
)
async def async_add_assistant_content( async def async_add_assistant_content(
self, self,
@@ -437,14 +317,6 @@ class ChatLog:
tool_result=tool_result, tool_result=tool_result,
) )
self.content.append(response_content) self.content.append(response_content)
_async_notify_subscribers(
self.hass,
ChatLogEventType.CONTENT_ADDED,
{
"conversation_id": self.conversation_id,
"content": response_content.as_dict(),
},
)
yield response_content yield response_content
async def async_add_delta_content_stream( async def async_add_delta_content_stream(
@@ -642,7 +514,7 @@ class ChatLog:
"""Set the LLM system prompt.""" """Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None llm_api: llm.APIInstance | None = None
if not user_llm_hass_api: if user_llm_hass_api is None:
pass pass
elif isinstance(user_llm_hass_api, llm.API): elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context) llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
@@ -718,11 +590,6 @@ class ChatLog:
self.llm_api = llm_api self.llm_api = llm_api
self.extra_system_prompt = extra_system_prompt self.extra_system_prompt = extra_system_prompt
self.content[0] = SystemContent(content=prompt) self.content[0] = SystemContent(content=prompt)
_async_notify_subscribers(
self.hass,
ChatLogEventType.UPDATED,
{"conversation_id": self.conversation_id, "chat_log": self.as_dict()},
)
LOGGER.debug("Prompt: %s", self.content) LOGGER.debug("Prompt: %s", self.content)
LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None) LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)

View File

@@ -26,19 +26,7 @@ SERVICE_RELOAD = "reload"
DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN) DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN)
from enum import StrEnum
class ConversationEntityFeature(IntFlag): class ConversationEntityFeature(IntFlag):
"""Supported features of the conversation entity.""" """Supported features of the conversation entity."""
CONTROL = 1 CONTROL = 1
class ChatLogEventType(StrEnum):
"""Chat log event type."""
CREATED = "created"
UPDATED = "updated"
DELETED = "deleted"
CONTENT_ADDED = "content_added"

View File

@@ -38,30 +38,22 @@ from home_assistant_intents import (
ErrorKey, ErrorKey,
FuzzyConfig, FuzzyConfig,
FuzzyLanguageResponses, FuzzyLanguageResponses,
LanguageScores,
get_fuzzy_config, get_fuzzy_config,
get_fuzzy_language, get_fuzzy_language,
get_intents, get_intents,
get_language_scores,
get_languages, get_languages,
) )
import yaml import yaml
from homeassistant import core
from homeassistant.components.homeassistant.exposed_entities import ( from homeassistant.components.homeassistant.exposed_entities import (
async_listen_entity_updates, async_listen_entity_updates,
async_should_expose, async_should_expose,
) )
from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
from homeassistant.core import ( from homeassistant.core import Event, callback
Event,
EventStateChangedData,
HomeAssistant,
State,
callback,
)
from homeassistant.helpers import ( from homeassistant.helpers import (
area_registry as ar, area_registry as ar,
config_validation as cv,
device_registry as dr, device_registry as dr,
entity_registry as er, entity_registry as er,
floor_registry as fr, floor_registry as fr,
@@ -200,7 +192,7 @@ class IntentCache:
async def async_setup_default_agent( async def async_setup_default_agent(
hass: HomeAssistant, hass: core.HomeAssistant,
entity_component: EntityComponent[ConversationEntity], entity_component: EntityComponent[ConversationEntity],
config_intents: dict[str, Any], config_intents: dict[str, Any],
) -> None: ) -> None:
@@ -209,13 +201,15 @@ async def async_setup_default_agent(
await entity_component.async_add_entities([agent]) await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent) await get_agent_manager(hass).async_setup_default_agent(agent)
@callback @core.callback
def async_entity_state_listener(event: Event[EventStateChangedData]) -> None: def async_entity_state_listener(
event: core.Event[core.EventStateChangedData],
) -> None:
"""Set expose flag on new entities.""" """Set expose flag on new entities."""
async_should_expose(hass, DOMAIN, event.data["entity_id"]) async_should_expose(hass, DOMAIN, event.data["entity_id"])
@callback @core.callback
def async_hass_started(hass: HomeAssistant) -> None: def async_hass_started(hass: core.HomeAssistant) -> None:
"""Set expose flag on all entities.""" """Set expose flag on all entities."""
for state in hass.states.async_all(): for state in hass.states.async_all():
async_should_expose(hass, DOMAIN, state.entity_id) async_should_expose(hass, DOMAIN, state.entity_id)
@@ -230,7 +224,9 @@ class DefaultAgent(ConversationEntity):
_attr_name = "Home Assistant" _attr_name = "Home Assistant"
_attr_supported_features = ConversationEntityFeature.CONTROL _attr_supported_features = ConversationEntityFeature.CONTROL
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None: def __init__(
self, hass: core.HomeAssistant, config_intents: dict[str, Any]
) -> None:
"""Initialize the default agent.""" """Initialize the default agent."""
self.hass = hass self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {} self._lang_intents: dict[str, LanguageIntents | object] = {}
@@ -263,7 +259,7 @@ class DefaultAgent(ConversationEntity):
"""Return a list of supported languages.""" """Return a list of supported languages."""
return get_languages() return get_languages()
@callback @core.callback
def _filter_entity_registry_changes( def _filter_entity_registry_changes(
self, event_data: er.EventEntityRegistryUpdatedData self, event_data: er.EventEntityRegistryUpdatedData
) -> bool: ) -> bool:
@@ -272,12 +268,12 @@ class DefaultAgent(ConversationEntity):
field in event_data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS field in event_data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
) )
@callback @core.callback
def _filter_state_changes(self, event_data: EventStateChangedData) -> bool: def _filter_state_changes(self, event_data: core.EventStateChangedData) -> bool:
"""Filter state changed events.""" """Filter state changed events."""
return not event_data["old_state"] or not event_data["new_state"] return not event_data["old_state"] or not event_data["new_state"]
@callback @core.callback
def _listen_clear_slot_list(self) -> None: def _listen_clear_slot_list(self) -> None:
"""Listen for changes that can invalidate slot list.""" """Listen for changes that can invalidate slot list."""
assert self._unsub_clear_slot_list is None assert self._unsub_clear_slot_list is None
@@ -346,81 +342,6 @@ class DefaultAgent(ConversationEntity):
return result return result
async def async_debug_recognize(
self, user_input: ConversationInput
) -> dict[str, Any] | None:
"""Debug recognize from user input."""
result_dict: dict[str, Any] | None = None
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
result_dict = {
# Matched a user-defined sentence trigger.
# We can't provide the response here without executing the
# trigger.
"match": True,
"source": "trigger",
"sentence_template": trigger_result.sentence_template or "",
}
elif intent_result := await self.async_recognize_intent(user_input):
successful_match = not intent_result.unmatched_entities
result_dict = {
# Name of the matching intent (or the closest)
"intent": {
"name": intent_result.intent.name,
},
# Slot values that would be received by the intent
"slots": { # direct access to values
entity_key: entity.text or entity.value
for entity_key, entity in intent_result.entities.items()
},
# Extra slot details, such as the originally matched text
"details": {
entity_key: {
"name": entity.name,
"value": entity.value,
"text": entity.text,
}
for entity_key, entity in intent_result.entities.items()
},
# Entities/areas/etc. that would be targeted
"targets": {},
# True if match was successful
"match": successful_match,
# Text of the sentence template that matched (or was closest)
"sentence_template": "",
# When match is incomplete, this will contain the best slot guesses
"unmatched_slots": _get_unmatched_slots(intent_result),
# True if match was not exact
"fuzzy_match": False,
}
if successful_match:
result_dict["targets"] = {
state.entity_id: {"matched": is_matched}
for state, is_matched in _get_debug_targets(
self.hass, intent_result
)
}
if intent_result.intent_sentence is not None:
result_dict["sentence_template"] = intent_result.intent_sentence.text
if intent_result.intent_metadata:
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
METADATA_FUZZY_MATCH, False
)
return result_dict
async def _async_handle_message( async def _async_handle_message(
self, self,
user_input: ConversationInput, user_input: ConversationInput,
@@ -969,7 +890,7 @@ class DefaultAgent(ConversationEntity):
) -> str: ) -> str:
# Get first matched or unmatched state. # Get first matched or unmatched state.
# This is available in the response template as "state". # This is available in the response template as "state".
state1: State | None = None state1: core.State | None = None
if intent_response.matched_states: if intent_response.matched_states:
state1 = intent_response.matched_states[0] state1 = intent_response.matched_states[0]
elif intent_response.unmatched_states: elif intent_response.unmatched_states:
@@ -1607,10 +1528,6 @@ class DefaultAgent(ConversationEntity):
return None return None
return response return response
async def async_get_language_scores(self) -> dict[str, LanguageScores]:
"""Get support scores per language."""
return await self.hass.async_add_executor_job(get_language_scores)
def _make_error_result( def _make_error_result(
language: str, language: str,
@@ -1672,7 +1589,7 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str
def _get_match_error_response( def _get_match_error_response(
hass: HomeAssistant, hass: core.HomeAssistant,
match_error: intent.MatchFailedError, match_error: intent.MatchFailedError,
) -> tuple[ErrorKey, dict[str, Any]]: ) -> tuple[ErrorKey, dict[str, Any]]:
"""Return key and template arguments for error when target matching fails.""" """Return key and template arguments for error when target matching fails."""
@@ -1807,75 +1724,3 @@ def _collect_list_references(expression: Expression, list_names: set[str]) -> No
elif isinstance(expression, ListReference): elif isinstance(expression, ListReference):
# {list} # {list}
list_names.add(expression.slot_name) list_names.add(expression.slot_name)
def _get_debug_targets(
hass: HomeAssistant,
result: RecognizeResult,
) -> Iterable[tuple[State, bool]]:
"""Yield state/is_matched pairs for a hassil recognition."""
entities = result.entities
name: str | None = None
area_name: str | None = None
domains: set[str] | None = None
device_classes: set[str] | None = None
state_names: set[str] | None = None
if "name" in entities:
name = str(entities["name"].value)
if "area" in entities:
area_name = str(entities["area"].value)
if "domain" in entities:
domains = set(cv.ensure_list(entities["domain"].value))
if "device_class" in entities:
device_classes = set(cv.ensure_list(entities["device_class"].value))
if "state" in entities:
# HassGetState only
state_names = set(cv.ensure_list(entities["state"].value))
if (
(name is None)
and (area_name is None)
and (not domains)
and (not device_classes)
and (not state_names)
):
# Avoid "matching" all entities when there is no filter
return
states = intent.async_match_states(
hass,
name=name,
area_name=area_name,
domains=domains,
device_classes=device_classes,
)
for state in states:
# For queries, a target is "matched" based on its state
is_matched = (state_names is None) or (state.state in state_names)
yield state, is_matched
def _get_unmatched_slots(
result: RecognizeResult,
) -> dict[str, str | int | float]:
"""Return a dict of unmatched text/range slot entities."""
unmatched_slots: dict[str, str | int | float] = {}
for entity in result.unmatched_entities_list:
if isinstance(entity, UnmatchedTextEntity):
if entity.text == MISSING_ENTITY:
# Don't report <missing> since these are just missing context
# slots.
continue
unmatched_slots[entity.name] = entity.text
elif isinstance(entity, UnmatchedRangeEntity):
unmatched_slots[entity.name] = entity.value
return unmatched_slots

View File

@@ -2,16 +2,21 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Iterable
from dataclasses import asdict from dataclasses import asdict
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
from hassil.recognize import MISSING_ENTITY, RecognizeResult
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from home_assistant_intents import get_language_scores
import voluptuous as vol import voluptuous as vol
from homeassistant.components import http, websocket_api from homeassistant.components import http, websocket_api
from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import MATCH_ALL from homeassistant.const import MATCH_ALL
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.util import language as language_util from homeassistant.util import language as language_util
from .agent_manager import ( from .agent_manager import (
@@ -20,8 +25,12 @@ from .agent_manager import (
async_get_agent, async_get_agent,
get_agent_manager, get_agent_manager,
) )
from .chat_log import async_subscribe_chat_logs
from .const import DATA_COMPONENT from .const import DATA_COMPONENT
from .default_agent import (
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
METADATA_FUZZY_MATCH,
)
from .entity import ConversationEntity from .entity import ConversationEntity
from .models import ConversationInput from .models import ConversationInput
@@ -36,7 +45,6 @@ def async_setup(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_list_sentences)
websocket_api.async_register_command(hass, websocket_hass_agent_debug) websocket_api.async_register_command(hass, websocket_hass_agent_debug)
websocket_api.async_register_command(hass, websocket_hass_agent_language_scores) websocket_api.async_register_command(hass, websocket_hass_agent_language_scores)
websocket_api.async_register_command(hass, websocket_subscribe_chat_logs)
@websocket_api.websocket_command( @websocket_api.websocket_command(
@@ -198,12 +206,150 @@ async def websocket_hass_agent_debug(
language=msg.get("language", hass.config.language), language=msg.get("language", hass.config.language),
agent_id=agent.entity_id, agent_id=agent.entity_id,
) )
result_dict = await agent.async_debug_recognize(user_input) result_dict: dict[str, Any] | None = None
if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
result_dict = {
# Matched a user-defined sentence trigger.
# We can't provide the response here without executing the
# trigger.
"match": True,
"source": "trigger",
"sentence_template": trigger_result.sentence_template or "",
}
elif intent_result := await agent.async_recognize_intent(user_input):
successful_match = not intent_result.unmatched_entities
result_dict = {
# Name of the matching intent (or the closest)
"intent": {
"name": intent_result.intent.name,
},
# Slot values that would be received by the intent
"slots": { # direct access to values
entity_key: entity.text or entity.value
for entity_key, entity in intent_result.entities.items()
},
# Extra slot details, such as the originally matched text
"details": {
entity_key: {
"name": entity.name,
"value": entity.value,
"text": entity.text,
}
for entity_key, entity in intent_result.entities.items()
},
# Entities/areas/etc. that would be targeted
"targets": {},
# True if match was successful
"match": successful_match,
# Text of the sentence template that matched (or was closest)
"sentence_template": "",
# When match is incomplete, this will contain the best slot guesses
"unmatched_slots": _get_unmatched_slots(intent_result),
# True if match was not exact
"fuzzy_match": False,
}
if successful_match:
result_dict["targets"] = {
state.entity_id: {"matched": is_matched}
for state, is_matched in _get_debug_targets(hass, intent_result)
}
if intent_result.intent_sentence is not None:
result_dict["sentence_template"] = intent_result.intent_sentence.text
if intent_result.intent_metadata:
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
METADATA_FUZZY_MATCH, False
)
result_dicts.append(result_dict) result_dicts.append(result_dict)
connection.send_result(msg["id"], {"results": result_dicts}) connection.send_result(msg["id"], {"results": result_dicts})
def _get_debug_targets(
hass: HomeAssistant,
result: RecognizeResult,
) -> Iterable[tuple[State, bool]]:
"""Yield state/is_matched pairs for a hassil recognition."""
entities = result.entities
name: str | None = None
area_name: str | None = None
domains: set[str] | None = None
device_classes: set[str] | None = None
state_names: set[str] | None = None
if "name" in entities:
name = str(entities["name"].value)
if "area" in entities:
area_name = str(entities["area"].value)
if "domain" in entities:
domains = set(cv.ensure_list(entities["domain"].value))
if "device_class" in entities:
device_classes = set(cv.ensure_list(entities["device_class"].value))
if "state" in entities:
# HassGetState only
state_names = set(cv.ensure_list(entities["state"].value))
if (
(name is None)
and (area_name is None)
and (not domains)
and (not device_classes)
and (not state_names)
):
# Avoid "matching" all entities when there is no filter
return
states = intent.async_match_states(
hass,
name=name,
area_name=area_name,
domains=domains,
device_classes=device_classes,
)
for state in states:
# For queries, a target is "matched" based on its state
is_matched = (state_names is None) or (state.state in state_names)
yield state, is_matched
def _get_unmatched_slots(
result: RecognizeResult,
) -> dict[str, str | int | float]:
"""Return a dict of unmatched text/range slot entities."""
unmatched_slots: dict[str, str | int | float] = {}
for entity in result.unmatched_entities_list:
if isinstance(entity, UnmatchedTextEntity):
if entity.text == MISSING_ENTITY:
# Don't report <missing> since these are just missing context
# slots.
continue
unmatched_slots[entity.name] = entity.text
elif isinstance(entity, UnmatchedRangeEntity):
unmatched_slots[entity.name] = entity.value
return unmatched_slots
@websocket_api.websocket_command( @websocket_api.websocket_command(
{ {
vol.Required("type"): "conversation/agent/homeassistant/language_scores", vol.Required("type"): "conversation/agent/homeassistant/language_scores",
@@ -218,13 +364,10 @@ async def websocket_hass_agent_language_scores(
msg: dict[str, Any], msg: dict[str, Any],
) -> None: ) -> None:
"""Get support scores per language.""" """Get support scores per language."""
agent = get_agent_manager(hass).default_agent
assert agent is not None
language = msg.get("language", hass.config.language) language = msg.get("language", hass.config.language)
country = msg.get("country", hass.config.country) country = msg.get("country", hass.config.country)
scores = await agent.async_get_language_scores() scores = await hass.async_add_executor_job(get_language_scores)
matching_langs = language_util.matches(language, scores.keys(), country=country) matching_langs = language_util.matches(language, scores.keys(), country=country)
preferred_lang = matching_langs[0] if matching_langs else language preferred_lang = matching_langs[0] if matching_langs else language
result = { result = {
@@ -267,28 +410,3 @@ class ConversationProcessView(http.HomeAssistantView):
) )
return self.json(result.as_dict()) return self.json(result.as_dict())
@websocket_api.websocket_command(
{
vol.Required("type"): "conversation/chat_log/subscribe",
}
)
@websocket_api.require_admin
def websocket_subscribe_chat_logs(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to all chat logs."""
@callback
def forward_events(event_type: str, data: dict) -> None:
"""Forward chat log events to websocket connection."""
connection.send_message(
{"type": "event", "event_type": event_type, "data": data}
)
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
connection.subscriptions[msg["id"]] = unsubscribe
connection.send_result(msg["id"])

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation", "documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity", "integration_type": "entity",
"quality_scale": "internal", "quality_scale": "internal",
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"] "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
} }

View File

@@ -13,7 +13,7 @@ from propcache.api import cached_property
import voluptuous as vol import voluptuous as vol
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ( from homeassistant.const import ( # noqa: F401
SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT, SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER, SERVICE_OPEN_COVER,
@@ -24,9 +24,19 @@ from homeassistant.const import (
SERVICE_STOP_COVER_TILT, SERVICE_STOP_COVER_TILT,
SERVICE_TOGGLE, SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT, SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
) )
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.typing import ConfigType
@@ -53,6 +63,15 @@ class CoverState(StrEnum):
OPENING = "opening" OPENING = "opening"
# STATE_* below are deprecated as of 2024.11
# when imported from homeassistant.components.cover
# use the CoverState enum instead.
_DEPRECATED_STATE_CLOSED = DeprecatedConstantEnum(CoverState.CLOSED, "2025.11")
_DEPRECATED_STATE_CLOSING = DeprecatedConstantEnum(CoverState.CLOSING, "2025.11")
_DEPRECATED_STATE_OPEN = DeprecatedConstantEnum(CoverState.OPEN, "2025.11")
_DEPRECATED_STATE_OPENING = DeprecatedConstantEnum(CoverState.OPENING, "2025.11")
class CoverDeviceClass(StrEnum): class CoverDeviceClass(StrEnum):
"""Device class for cover.""" """Device class for cover."""
@@ -444,3 +463,11 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
return ( return (
fns["close"] if self._cover_is_last_toggle_direction_open else fns["open"] fns["close"] if self._cover_is_last_toggle_direction_open else fns["open"]
) )
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = ft.partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
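
The block below is a rough, self-contained illustration of how the deprecation shim wired up above behaves for a consumer that still reads STATE_OPEN from the cover module. The helper names (DeprecatedConstantEnum, check_if_deprecated_constant) come from the diff, but their bodies here are simplified stand-ins, not the actual homeassistant.helpers.deprecation implementations.

# Simplified stand-in for the deprecated-constant shim; not the real
# homeassistant.helpers.deprecation code, just the same access pattern.
import functools as ft
import sys
import warnings
from dataclasses import dataclass
from enum import StrEnum


class CoverState(StrEnum):
    """Subset of the cover states used for this illustration."""

    OPEN = "open"
    CLOSED = "closed"


@dataclass
class DeprecatedConstantEnum:
    """Pair a replacement enum member with the version that drops the alias."""

    enum: StrEnum
    breaks_in_ha_version: str


_DEPRECATED_STATE_OPEN = DeprecatedConstantEnum(CoverState.OPEN, "2025.11")


def check_if_deprecated_constant(name: str, module_globals: dict) -> StrEnum:
    """Warn about a deprecated module constant and return its replacement."""
    deprecated = module_globals.get(f"_DEPRECATED_{name}")
    if deprecated is None:
        raise AttributeError(f"module has no attribute {name!r}")
    warnings.warn(
        f"{name} is deprecated, use {deprecated.enum!r} instead; "
        f"the alias is planned for removal in {deprecated.breaks_in_ha_version}",
        DeprecationWarning,
        stacklevel=2,
    )
    return deprecated.enum


# Module-level __getattr__ (PEP 562) only fires for names the module does not
# define itself, which is why the aliases are prefixed with _DEPRECATED_.
__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals())

# Accessing the old constant on the module object emits a DeprecationWarning
# and hands back the CoverState member.
assert getattr(sys.modules[__name__], "STATE_OPEN") is CoverState.OPEN

In the real integration, the same __getattr__/__dir__/__all__ wiring is what keeps STATE_OPEN importable until 2025.11 while steering callers toward the CoverState enum, as the comment in the diff states.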

View File

@@ -7,5 +7,5 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"quality_scale": "bronze", "quality_scale": "bronze",
"requirements": ["pycync==0.4.1"] "requirements": ["pycync==0.4.0"]
} }

View File

@@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.util.ssl import client_context_no_verify from homeassistant.util.ssl import client_context_no_verify
from .const import KEY_MAC, TIMEOUT_SEC from .const import KEY_MAC, TIMEOUT
from .coordinator import DaikinConfigEntry, DaikinCoordinator from .coordinator import DaikinConfigEntry, DaikinCoordinator
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
host = conf[CONF_HOST] host = conf[CONF_HOST]
try: try:
async with asyncio.timeout(TIMEOUT_SEC): async with asyncio.timeout(TIMEOUT):
device: Appliance = await DaikinFactory( device: Appliance = await DaikinFactory(
host, host,
session, session,
@@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
) )
_LOGGER.debug("Connection to %s successful", host) _LOGGER.debug("Connection to %s successful", host)
except TimeoutError as err: except TimeoutError as err:
_LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT_SEC) _LOGGER.debug("Connection to %s timed out in 60 seconds", host)
raise ConfigEntryNotReady from err raise ConfigEntryNotReady from err
except ClientConnectionError as err: except ClientConnectionError as err:
_LOGGER.debug("ClientConnectionError to %s", host) _LOGGER.debug("ClientConnectionError to %s", host)

View File

@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.ssl import client_context_no_verify from homeassistant.util.ssl import client_context_no_verify
from .const import DOMAIN, KEY_MAC, TIMEOUT_SEC from .const import DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -84,7 +84,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
password = None password = None
try: try:
async with asyncio.timeout(TIMEOUT_SEC): async with asyncio.timeout(TIMEOUT):
device: Appliance = await DaikinFactory( device: Appliance = await DaikinFactory(
host, host,
async_get_clientsession(self.hass), async_get_clientsession(self.hass),

View File

@@ -24,4 +24,4 @@ ATTR_STATE_OFF = "off"
KEY_MAC = "mac" KEY_MAC = "mac"
KEY_IP = "ip" KEY_IP = "ip"
TIMEOUT_SEC = 120 TIMEOUT = 60

View File

@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, TIMEOUT_SEC from .const import DOMAIN
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -28,7 +28,7 @@ class DaikinCoordinator(DataUpdateCoordinator[None]):
_LOGGER, _LOGGER,
config_entry=entry, config_entry=entry,
name=device.values.get("name", DOMAIN), name=device.values.get("name", DOMAIN),
update_interval=timedelta(seconds=TIMEOUT_SEC), update_interval=timedelta(seconds=60),
) )
self.device = device self.device = device

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin", "documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["pydaikin"], "loggers": ["pydaikin"],
"requirements": ["pydaikin==2.17.1"], "requirements": ["pydaikin==2.16.0"],
"zeroconf": ["_dkapi._tcp.local."] "zeroconf": ["_dkapi._tcp.local."]
} }

View File

@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
self._attr_translation_key = "button" self._attr_translation_key = "button"
self._attr_translation_placeholders = {"key": str(key)} self._attr_translation_placeholders = {"key": str(key)}
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the binary sensor state.""" """Update the binary sensor state."""
if ( if (
message[0] == self._remote_control_property.element_uid message[0] == self._remote_control_property.element_uid

View File

@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
) )
self.subscriber: Subscriber | None = None self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: float self._value: float
@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
self._device_instance.uid, self.subscriber self._device_instance.uid, self.subscriber
) )
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the state.""" """Update the state."""
if message[0] == self._attr_unique_id: if message[0] == self._attr_unique_id:
self._value = message[1] self._value = message[1]

View File

@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
""" """
return f"{self._attr_unique_id}_{self._sensor_type}" return f"{self._attr_unique_id}_{self._sensor_type}"
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the consumption sensor state.""" """Update the consumption sensor state."""
if message[0] == self._attr_unique_id: if message[0] == self._attr_unique_id:
self._value = getattr( self._value = getattr(

View File

@@ -13,3 +13,8 @@ class Subscriber:
"""Initiate the subscriber.""" """Initiate the subscriber."""
self.name = name self.name = name
self.callback = callback self.callback = callback
def update(self, message: str) -> None:
"""Trigger hass to update the device."""
_LOGGER.debug('%s got message "%s"', self.name, message)
self.callback(message)
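
The sketch below condenses how the renamed _sync callback, the sync_callback alias, and the Subscriber glue from this diff fit together. The entity class is a stand-in for DevoloDeviceEntity, and the direct call to update() stands in for devolo_home_control's publisher, which is assumed to deliver messages to registered subscribers this way.

# Condensed sketch only; the stub entity is not the real DevoloDeviceEntity.
import logging
from collections.abc import Callable

_LOGGER = logging.getLogger(__name__)


class Subscriber:
    """Forward publisher messages to an entity callback."""

    def __init__(self, name: str, callback: Callable[[tuple], None]) -> None:
        """Initiate the subscriber."""
        self.name = name
        self.callback = callback

    def update(self, message: tuple) -> None:
        """Trigger hass to update the device."""
        _LOGGER.debug('%s got message "%s"', self.name, message)
        self.callback(message)


class StubEntity:
    """Minimal stand-in for DevoloDeviceEntity."""

    def __init__(self, uid: str) -> None:
        self._attr_unique_id = uid
        self._value: float | None = None
        # As in the diff: the public sync_callback attribute now aliases _sync,
        # so subclasses override _sync instead of replacing sync_callback.
        self.sync_callback = self._sync
        self.subscriber = Subscriber(uid, callback=self.sync_callback)

    def _sync(self, message: tuple) -> None:
        """Update the state from a (uid, value) message."""
        if message[0] == self._attr_unique_id:
            self._value = message[1]


entity = StubEntity("devolo.Meter:hdm:ZWave:example")
entity.subscriber.update(("devolo.Meter:hdm:ZWave:example", 42.0))
assert entity._value == 42.0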

View File

@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
"""Switch off the device.""" """Switch off the device."""
self._binary_switch_property.set(state=False) self._binary_switch_property.set(state=False)
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the binary switch state and consumption.""" """Update the binary switch state and consumption."""
if message[0].startswith("devolo.BinarySwitch"): if message[0].startswith("devolo.BinarySwitch"):
self._attr_is_on = self._device_instance.binary_switch_property[ self._attr_is_on = self._device_instance.binary_switch_property[

View File

@@ -17,6 +17,6 @@
"requirements": [ "requirements": [
"aiodhcpwatcher==1.2.1", "aiodhcpwatcher==1.2.1",
"aiodiscover==2.7.1", "aiodiscover==2.7.1",
"cached-ipaddress==1.0.1" "cached-ipaddress==0.10.0"
] ]
} }

Some files were not shown because too many files have changed in this diff.