Compare commits

..

4 Commits

Author SHA1 Message Date
Ludovic BOUÉ
08eee8d479 Update tests/components/matter/test_switch.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-29 11:40:54 +01:00
Ludovic BOUÉ
6bbaae7235 Update homeassistant/components/matter/switch.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-29 11:40:39 +01:00
Ludovic BOUÉ
86a5dff3f5 Add test for MatterError handling in numeric switch with Eve Thermo fixture 2025-10-29 10:15:35 +00:00
Ludovic BOUÉ
34e137005d Add error handling for Matter switch commands 2025-10-29 10:08:43 +00:00
1184 changed files with 18299 additions and 67788 deletions

View File

@@ -88,10 +88,6 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
exclude:
- arch: armv7
- arch: armhf
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -166,6 +162,18 @@ jobs:
sed -i "s|home-assistant-intents==.*||" requirements_all.txt
fi
- name: Adjustments for armhf
if: matrix.arch == 'armhf'
run: |
# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" requirements_all.txt
sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
@@ -218,11 +226,19 @@ jobs:
- odroid-c4
- odroid-m1
- odroid-n2
- odroid-xu
- qemuarm
- qemuarm-64
- qemux86
- qemux86-64
- raspberrypi
- raspberrypi2
- raspberrypi3
- raspberrypi3-64
- raspberrypi4
- raspberrypi4-64
- raspberrypi5-64
- tinker
- yellow
- green
steps:
@@ -281,7 +297,6 @@ jobs:
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
channel: ${{ needs.init.outputs.channel }}
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
- name: Update version file (stable -> beta)
if: needs.init.outputs.channel == 'stable'
@@ -291,7 +306,6 @@ jobs:
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
channel: beta
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
publish_container:
name: Publish meta container for ${{ matrix.registry }}
@@ -343,12 +357,27 @@ jobs:
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
@@ -376,14 +405,23 @@ jobs:
# Pull images from github container registry and verify signature
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 1
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.12"
HA_SHORT_VERSION: "2025.11"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
# 10.3 is the oldest supported version
@@ -502,6 +502,7 @@ jobs:
libavfilter-dev \
libavformat-dev \
libavutil-dev \
libgammu-dev \
libswresample-dev \
libswscale-dev \
libudev-dev
@@ -622,7 +623,7 @@ jobs:
steps:
- *checkout
- name: Dependency review
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
with:
license-check: false # We use our own license audit checks
@@ -800,7 +801,8 @@ jobs:
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
bluez \
ffmpeg \
libturbojpeg
libturbojpeg \
libgammu-dev
- *checkout
- *setup-python-default
- *cache-restore-python-default
@@ -851,6 +853,7 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
- *checkout
- *setup-python-matrix
@@ -1230,6 +1233,7 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
- *checkout
- *setup-python-matrix

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
with:
category: "/language:python"

View File

@@ -228,7 +228,7 @@ jobs:
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"

View File

@@ -107,7 +107,6 @@ homeassistant.components.automation.*
homeassistant.components.awair.*
homeassistant.components.axis.*
homeassistant.components.azure_storage.*
homeassistant.components.backblaze_b2.*
homeassistant.components.backup.*
homeassistant.components.baf.*
homeassistant.components.bang_olufsen.*
@@ -396,6 +395,7 @@ homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.paperless_ngx.*
homeassistant.components.peblar.*

CODEOWNERS (generated, 24 changes)
View File

@@ -196,8 +196,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/azure_service_bus/ @hfurubotten
/homeassistant/components/azure_storage/ @zweckj
/tests/components/azure_storage/ @zweckj
/homeassistant/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
/tests/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
/homeassistant/components/backup/ @home-assistant/core
/tests/components/backup/ @home-assistant/core
/homeassistant/components/baf/ @bdraco @jfroy
@@ -318,6 +316,8 @@ build.json @home-assistant/supervisor
/tests/components/cpuspeed/ @fabaff
/homeassistant/components/crownstone/ @Crownstone @RicArch97
/tests/components/crownstone/ @Crownstone @RicArch97
/homeassistant/components/cups/ @fabaff
/tests/components/cups/ @fabaff
/homeassistant/components/cync/ @Kinachi249
/tests/components/cync/ @Kinachi249
/homeassistant/components/daikin/ @fredrike
@@ -510,6 +510,8 @@ build.json @home-assistant/supervisor
/tests/components/fjaraskupan/ @elupus
/homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski
/tests/components/flexit_bacnet/ @lellky @piotrbulinski
/homeassistant/components/flick_electric/ @ZephireNZ
/tests/components/flick_electric/ @ZephireNZ
/homeassistant/components/flipr/ @cnico
/tests/components/flipr/ @cnico
/homeassistant/components/flo/ @dmulcahey
@@ -1017,8 +1019,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
/tests/components/music_assistant/ @music-assistant @arturpragacz
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@@ -1477,6 +1479,8 @@ build.json @home-assistant/supervisor
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
/tests/components/smlight/ @tl-sl
/homeassistant/components/sms/ @ocalvo
/tests/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
@@ -1539,8 +1543,8 @@ build.json @home-assistant/supervisor
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sunricher_dali/ @niracler
/tests/components/sunricher_dali/ @niracler
/homeassistant/components/sunricher_dali_center/ @niracler
/tests/components/sunricher_dali_center/ @niracler
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@@ -1717,8 +1721,8 @@ build.json @home-assistant/supervisor
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
/homeassistant/components/vegehub/ @thulrus
/tests/components/vegehub/ @thulrus
/homeassistant/components/vegehub/ @ghowevege
/tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
@@ -1817,8 +1821,8 @@ build.json @home-assistant/supervisor
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
/tests/components/xbox/ @hunterjm @tr4nt0r
/homeassistant/components/xbox/ @hunterjm
/tests/components/xbox/ @hunterjm
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79

Dockerfile (generated, 2 changes)
View File

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.9.6
RUN pip3 install uv==0.9.5
WORKDIR /usr/src

View File

@@ -13,6 +13,7 @@ RUN \
libavcodec-dev \
libavdevice-dev \
libavutil-dev \
libgammu-dev \
libswscale-dev \
libswresample-dev \
libavfilter-dev \

View File

@@ -6,6 +6,7 @@ Sending HOTP through notify service
from __future__ import annotations
import asyncio
from collections import OrderedDict
import logging
from typing import Any, cast
@@ -303,14 +304,13 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
if not self._available_notify_services:
return self.async_abort(reason="no_available_service")
schema = vol.Schema(
{
vol.Required("notify_service"): vol.In(self._available_notify_services),
vol.Optional("target"): str,
}
)
schema: dict[str, Any] = OrderedDict()
schema["notify_service"] = vol.In(self._available_notify_services)
schema["target"] = vol.Optional(str)
return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
return self.async_show_form(
step_id="init", data_schema=vol.Schema(schema), errors=errors
)
async def async_step_setup(
self, user_input: dict[str, str] | None = None

View File

@@ -179,18 +179,12 @@ class Data:
user_hash = base64.b64decode(found["password"])
# bcrypt.checkpw is timing-safe
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
if not bcrypt.checkpw(password.encode()[:72], user_hash):
if not bcrypt.checkpw(password.encode(), user_hash):
raise InvalidAuth
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
"""Encode a password."""
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
if for_storage:
hashed = base64.b64encode(hashed)
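For context, a minimal sketch of the behaviour the comments above describe (bcrypt >= 5.0 rejecting passwords longer than 72 bytes, hence the explicit slice); the password value here is made up:
import bcrypt

password = ("x" * 100).encode()  # 100 bytes, longer than bcrypt's 72-byte limit
hashed = bcrypt.hashpw(password[:72], bcrypt.gensalt(rounds=12))  # explicit truncation
assert bcrypt.checkpw(password[:72], hashed)  # timing-safe check against the stored hash
# bcrypt.hashpw(password, bcrypt.gensalt())  # would raise ValueError on bcrypt 5.0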

View File

@@ -1,5 +1,11 @@
{
"domain": "yale",
"name": "Yale (non-US/Canada)",
"integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
"name": "Yale",
"integrations": [
"august",
"yale_smart_alarm",
"yalexs_ble",
"yale_home",
"yale"
]
}

View File

@@ -1,5 +0,0 @@
{
"domain": "yale_august",
"name": "Yale August (US/Canada)",
"integrations": ["august", "august_ble"]
}

View File

@@ -17,11 +17,6 @@ from homeassistant.const import (
CONF_UNIQUE_ID,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import (
ACCOUNT_ID,
@@ -71,15 +66,7 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the local step."""
data_schema = vol.Schema(
{
vol.Required(WIFI_SSID): str,
vol.Required(WIFI_PSWD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
),
),
}
{vol.Required(WIFI_SSID): str, vol.Required(WIFI_PSWD): str}
)
if user_input is None:
return self.async_show_form(

View File

@@ -2,16 +2,14 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import cast
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfEnergy, UnitOfTemperature
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -22,74 +20,44 @@ from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator
@dataclass(kw_only=True, frozen=True)
class AdaxSensorDescription(SensorEntityDescription):
"""Describes Adax sensor entity."""
data_key: str
SENSORS: tuple[AdaxSensorDescription, ...] = (
AdaxSensorDescription(
key="temperature",
data_key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
AdaxSensorDescription(
key="energy",
data_key="energyWh",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=3,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AdaxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax sensors with config flow."""
"""Set up the Adax energy sensors with config flow."""
if entry.data.get(CONNECTION_TYPE) != LOCAL:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
# Create individual energy sensors for each device
async_add_entities(
[
AdaxSensor(cloud_coordinator, entity_description, device_id)
for device_id in cloud_coordinator.data
for entity_description in SENSORS
]
AdaxEnergySensor(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
)
class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
"""Representation of an Adax sensor."""
class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
"""Representation of an Adax energy sensor."""
entity_description: AdaxSensorDescription
_attr_has_entity_name = True
_attr_translation_key = "energy"
_attr_device_class = SensorDeviceClass.ENERGY
_attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
_attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
_attr_state_class = SensorStateClass.TOTAL_INCREASING
_attr_suggested_display_precision = 3
def __init__(
self,
coordinator: AdaxCloudCoordinator,
entity_description: AdaxSensorDescription,
device_id: str,
) -> None:
"""Initialize the sensor."""
"""Initialize the energy sensor."""
super().__init__(coordinator)
self.entity_description = entity_description
self._device_id = device_id
room = coordinator.data[device_id]
self._attr_unique_id = (
f"{room['homeId']}_{device_id}_{self.entity_description.key}"
)
self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_id)},
name=room["name"],
@@ -100,14 +68,10 @@ class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and self.entity_description.data_key
in self.coordinator.data[self._device_id]
super().available and "energyWh" in self.coordinator.data[self._device_id]
)
@property
def native_value(self) -> int | float | None:
def native_value(self) -> int:
"""Return the native value of the sensor."""
return self.coordinator.data[self._device_id].get(
self.entity_description.data_key
)
return int(self.coordinator.data[self._device_id]["energyWh"])

View File

@@ -30,7 +30,6 @@ generate_data:
media:
accept:
- "*"
multiple: true
generate_image:
fields:
task_name:
@@ -58,4 +57,3 @@ generate_image:
media:
accept:
- "*"
multiple: true

View File

@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS
from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
from .const import DOMAIN, MFCT_ID
_LOGGER = logging.getLogger(__name__)
@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None
if user_input is not None:
if self._discovered_device.device.firmware.need_firmware_upgrade:
if (
self._discovered_device is not None
and self._discovered_device.device.firmware.need_firmware_upgrade
):
return self.async_abort(reason="firmware_upgrade_required")
return self.async_create_entry(
title=self.context["title_placeholders"]["name"],
data={DEVICE_MODEL: self._discovered_device.device.model.value},
title=self.context["title_placeholders"]["name"], data={}
)
self._set_confirm_only()
@@ -164,10 +164,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered_device = discovery
return self.async_create_entry(
title=discovery.name,
data={DEVICE_MODEL: discovery.device.model.value},
)
return self.async_create_entry(title=discovery.name, data={})
current_addresses = self._async_current_ids(include_ignore=False)
devices: list[BluetoothServiceInfoBleak] = []

View File

@@ -1,16 +1,11 @@
"""Constants for Airthings BLE."""
from airthings_ble import AirthingsDeviceType
DOMAIN = "airthings_ble"
MFCT_ID = 820
VOLUME_BECQUEREL = "Bq/m³"
VOLUME_PICOCURIE = "pCi/L"
DEVICE_MODEL = "device_model"
DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}
MAX_RETRIES_AFTER_STARTUP = 5

View File

@@ -16,12 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.unit_system import METRIC_SYSTEM
from .const import (
DEFAULT_SCAN_INTERVAL,
DEVICE_MODEL,
DEVICE_SPECIFIC_SCAN_INTERVAL,
DOMAIN,
)
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -39,18 +34,12 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
self.airthings = AirthingsBluetoothDeviceData(
_LOGGER, hass.config.units is METRIC_SYSTEM
)
device_model = entry.data.get(DEVICE_MODEL)
interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
device_model, DEFAULT_SCAN_INTERVAL
)
super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=timedelta(seconds=interval),
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
)
async def _async_setup(self) -> None:
@@ -69,29 +58,11 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
)
self.ble_device = ble_device
if DEVICE_MODEL not in self.config_entry.data:
_LOGGER.debug("Fetching device info for migration")
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(
f"Unable to fetch data for migration: {err}"
) from err
self.hass.config_entries.async_update_entry(
self.config_entry,
data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
)
self.update_interval = timedelta(
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
data.model.value, DEFAULT_SCAN_INTERVAL
)
)
async def _async_update_data(self) -> AirthingsDevice:
"""Get data from Airthings BLE."""
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err
return data
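A small sketch of the per-model interval lookup used above; the constants mirror the const.py hunk earlier in this compare, and the model string is a hypothetical stand-in for AirthingsDeviceType.CORENTIUM_HOME_2.value:
DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {"Corentium Home 2": 1800}  # keyed by the device model value

def scan_interval_for(device_model: str | None) -> int:
    # Unknown or unset models fall back to the default polling interval
    return DEVICE_SPECIFIC_SCAN_INTERVAL.get(device_model, DEFAULT_SCAN_INTERVAL)

assert scan_interval_for("Corentium Home 2") == 1800
assert scan_interval_for(None) == 300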

View File

@@ -28,5 +28,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"iot_class": "local_polling",
"requirements": ["airthings-ble==1.2.0"]
"requirements": ["airthings-ble==1.1.1"]
}

View File

@@ -143,28 +143,5 @@
"name": "Trigger"
}
},
"title": "Alarm control panel",
"triggers": {
"armed": {
"description": "Triggers when an alarm is armed.",
"description_configured": "Triggers when an alarm is armed",
"fields": {
"mode": {
"description": "The arm modes to trigger on. If empty, triggers on all arm modes.",
"name": "Arm modes"
}
},
"name": "When an alarm is armed"
},
"disarmed": {
"description": "Triggers when an alarm is disarmed.",
"description_configured": "Triggers when an alarm is disarmed",
"name": "When an alarm is disarmed"
},
"triggered": {
"description": "Triggers when an alarm is triggered.",
"description_configured": "Triggers when an alarm is triggered",
"name": "When an alarm is triggered"
}
}
"title": "Alarm control panel"
}

View File

@@ -1,283 +0,0 @@
"""Provides triggers for alarm control panels."""
from typing import TYPE_CHECKING, cast, override
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_OPTIONS,
CONF_TARGET,
STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.target import (
TargetStateChangedData,
async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, AlarmControlPanelState
CONF_MODE = "mode"
ARMED_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {
vol.Optional(CONF_MODE, default=[]): vol.All(
cv.ensure_list,
[
vol.In(
[
AlarmControlPanelState.ARMED_HOME,
AlarmControlPanelState.ARMED_AWAY,
AlarmControlPanelState.ARMED_NIGHT,
AlarmControlPanelState.ARMED_VACATION,
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
]
)
],
),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
DISARMED_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
TRIGGERED_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
class AlarmArmedTrigger(Trigger):
"""Trigger for when an alarm control panel is armed."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, ARMED_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the alarm armed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
mode_filter = self._options[CONF_MODE]
# All armed states
armed_states = {
AlarmControlPanelState.ARMED_HOME,
AlarmControlPanelState.ARMED_AWAY,
AlarmControlPanelState.ARMED_NIGHT,
AlarmControlPanelState.ARMED_VACATION,
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
}
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if the new state is an armed state
if to_state.state not in armed_states:
return
# If mode filter is specified, check if the mode matches
if mode_filter and to_state.state not in mode_filter:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"alarm armed on {entity_id}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class AlarmDisarmedTrigger(Trigger):
"""Trigger for when an alarm control panel is disarmed."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, DISARMED_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the alarm disarmed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.target is not None
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if the new state is disarmed
if to_state.state != AlarmControlPanelState.DISARMED:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"alarm disarmed on {entity_id}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class AlarmTriggeredTrigger(Trigger):
"""Trigger for when an alarm control panel is triggered."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, TRIGGERED_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the alarm triggered trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.target is not None
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if the new state is triggered
if to_state.state != AlarmControlPanelState.TRIGGERED:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"alarm triggered on {entity_id}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
TRIGGERS: dict[str, type[Trigger]] = {
"armed": AlarmArmedTrigger,
"disarmed": AlarmDisarmedTrigger,
"triggered": AlarmTriggeredTrigger,
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for alarm control panels."""
return TRIGGERS

View File

@@ -1,30 +0,0 @@
armed:
target:
entity:
domain: alarm_control_panel
fields:
mode:
required: false
default: []
selector:
select:
multiple: true
options:
- value: armed_home
label: Home
- value: armed_away
label: Away
- value: armed_night
label: Night
- value: armed_vacation
label: Vacation
- value: armed_custom_bypass
label: Custom bypass
disarmed:
target:
entity:
domain: alarm_control_panel
triggered:
target:
entity:
domain: alarm_control_panel

View File

@@ -58,10 +58,7 @@ from homeassistant.const import (
from homeassistant.helpers import network
from homeassistant.util import color as color_util, dt as dt_util
from homeassistant.util.decorator import Registry
from homeassistant.util.unit_conversion import (
TemperatureConverter,
TemperatureDeltaConverter,
)
from homeassistant.util.unit_conversion import TemperatureConverter
from .config import AbstractConfig
from .const import (
@@ -847,7 +844,7 @@ def temperature_from_object(
temp -= 273.15
if interval:
return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
return TemperatureConverter.convert(temp, from_unit, to_unit)
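As a quick illustration of why interval (delta) conversion is a separate code path from absolute conversion (plain arithmetic, not the Home Assistant helpers):
# Absolute reading: 10 °C corresponds to 50 °F (scale factor plus 32 °F offset)
fahrenheit_abs = 10 * 9 / 5 + 32    # 50.0
# Interval/delta: a 10 °C difference corresponds to 18 °F (scale factor only, no offset)
fahrenheit_delta = 10 * 9 / 5       # 18.0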

View File

@@ -6,8 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Final
from aioamazondevices.const.metadata import SENSOR_STATE_OFF
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,

View File

@@ -26,6 +26,3 @@ COUNTRY_DOMAINS = {
"us": DEFAULT_DOMAIN,
"za": "co.za",
}
CATEGORY_SENSORS = "sensors"
CATEGORY_NOTIFICATIONS = "notifications"

View File

@@ -2,13 +2,12 @@
from datetime import timedelta
from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
)
from aioamazondevices.structures import AmazonDevice
from aiohttp import ClientSession
from homeassistant.config_entries import ConfigEntry

View File

@@ -2,10 +2,9 @@
from __future__ import annotations
from dataclasses import asdict
from typing import Any
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
@@ -61,5 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
"sensors": device.sensors,
}

View File

@@ -1,7 +1,7 @@
"""Defines a base Alexa Devices entity."""
from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==8.0.1"]
"requirements": ["aioamazondevices==6.4.6"]
}

View File

@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final
from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant

View File

@@ -4,15 +4,9 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from typing import Final
from aioamazondevices.const.schedules import (
NOTIFICATION_ALARM,
NOTIFICATION_REMINDER,
NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -25,7 +19,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import CATEGORY_NOTIFICATIONS, CATEGORY_SENSORS
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
@@ -43,20 +36,6 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
)
category: str = CATEGORY_SENSORS
@dataclass(frozen=True, kw_only=True)
class AmazonNotificationEntityDescription(SensorEntityDescription):
"""Amazon Devices notification entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online
and (notification := device.notifications.get(key)) is not None
and notification.next_occurrence is not None
)
category: str = CATEGORY_NOTIFICATIONS
SENSORS: Final = (
@@ -77,23 +56,6 @@ SENSORS: Final = (
state_class=SensorStateClass.MEASUREMENT,
),
)
NOTIFICATIONS: Final = (
AmazonNotificationEntityDescription(
key=NOTIFICATION_ALARM,
translation_key="alarm",
device_class=SensorDeviceClass.TIMESTAMP,
),
AmazonNotificationEntityDescription(
key=NOTIFICATION_REMINDER,
translation_key="reminder",
device_class=SensorDeviceClass.TIMESTAMP,
),
AmazonNotificationEntityDescription(
key=NOTIFICATION_TIMER,
translation_key="timer",
device_class=SensorDeviceClass.TIMESTAMP,
),
)
async def async_setup_entry(
@@ -112,18 +74,12 @@ async def async_setup_entry(
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
sensors_list = [
async_add_entities(
AmazonSensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in SENSORS
for serial_num in new_devices
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
]
notifications_list = [
AmazonSensorEntity(coordinator, serial_num, notification_desc)
for notification_desc in NOTIFICATIONS
for serial_num in new_devices
]
async_add_entities(sensors_list + notifications_list)
)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
@@ -132,9 +88,7 @@ async def async_setup_entry(
class AmazonSensorEntity(AmazonEntity, SensorEntity):
"""Sensor device."""
entity_description: (
AmazonSensorEntityDescription | AmazonNotificationEntityDescription
)
entity_description: AmazonSensorEntityDescription
@property
def native_unit_of_measurement(self) -> str | None:
@@ -147,13 +101,9 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
return super().native_unit_of_measurement
@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the sensor."""
# Sensors
if self.entity_description.category == CATEGORY_SENSORS:
return self.device.sensors[self.entity_description.key].value
# Notifications
return self.device.notifications[self.entity_description.key].next_occurrence
return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:

View File

@@ -1,6 +1,6 @@
"""Support for services."""
from aioamazondevices.const.sounds import SOUNDS_LIST
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState

View File

@@ -66,17 +66,6 @@
"name": "Speak"
}
},
"sensor": {
"alarm": {
"name": "Next alarm"
},
"reminder": {
"name": "Next reminder"
},
"timer": {
"name": "Next timer"
}
},
"switch": {
"do_not_disturb": {
"name": "Do not disturb"

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,

View File

@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN

View File

@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv
from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import async_setup_services
from .services import setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
async_setup_services(hass)
setup_services(hass)
return True

View File

@@ -10,7 +10,6 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.selector import ConfigEntrySelector
@@ -103,8 +102,7 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
return results
@callback
def async_setup_services(hass: HomeAssistant) -> None:
def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amber integration."""
async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:

View File

@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="daily_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
),
SensorEntityDescription(
@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
key=TYPE_LIGHTNING_PER_DAY,
translation_key="lightning_strikes_per_day",
native_unit_of_measurement="strikes",
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="monthly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="weekly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="yearly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
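For reference, a short sketch of how the two state classes differ (Home Assistant sensor semantics, not part of this diff):
from homeassistant.components.sensor import SensorStateClass

# TOTAL: the value may increase or decrease; statistics track the net change.
# TOTAL_INCREASING: the value is assumed to only grow, and a drop is treated
# as a meter reset (for example daily_rain rolling over at midnight).
rain_state_class = SensorStateClass.TOTAL_INCREASING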

View File

@@ -39,11 +39,11 @@ from .const import (
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
DEFAULT_ADB_SERVER_PORT,
DEFAULT_DEVICE_CLASS,
DEFAULT_EXCLUDE_UNNAMED_APPS,
DEFAULT_GET_SOURCES,
DEFAULT_PORT,
DEFAULT_SCREENCAP_INTERVAL,
DEVICE_AUTO,
DEVICE_CLASSES,
DOMAIN,
PROP_ETHMAC,
@@ -89,14 +89,8 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
data_schema = vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(value=k, label=v)
for k, v in DEVICE_CLASSES.items()
],
translation_key="device_class",
)
vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
DEVICE_CLASSES
),
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
},

View File

@@ -15,19 +15,15 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
CONF_TURN_ON_COMMAND = "turn_on_command"
DEFAULT_ADB_SERVER_PORT = 5037
DEFAULT_DEVICE_CLASS = "auto"
DEFAULT_EXCLUDE_UNNAMED_APPS = False
DEFAULT_GET_SOURCES = True
DEFAULT_PORT = 5555
DEFAULT_SCREENCAP_INTERVAL = 5
DEVICE_AUTO = "auto"
DEVICE_ANDROIDTV = "androidtv"
DEVICE_FIRETV = "firetv"
DEVICE_CLASSES = {
DEVICE_AUTO: "auto",
DEVICE_ANDROIDTV: "Android TV",
DEVICE_FIRETV: "Fire TV",
}
DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
PROP_ETHMAC = "ethmac"
PROP_SERIALNO = "serialno"

View File

@@ -65,13 +65,6 @@
}
}
},
"selector": {
"device_class": {
"options": {
"auto": "Auto-detect device type"
}
}
},
"services": {
"adb_command": {
"description": "Sends an ADB command to an Android / Fire TV device.",

View File

@@ -25,7 +25,7 @@ from .const import (
RECOMMENDED_CHAT_MODEL,
)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

View File

@@ -1,80 +0,0 @@
"""AI Task integration for Anthropic."""
from __future__ import annotations
from json import JSONDecodeError
import logging
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads
from .entity import AnthropicBaseLLMEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""
for subentry in config_entry.subentries.values():
if subentry.subentry_type != "ai_task_data":
continue
async_add_entities(
[AnthropicTaskEntity(config_entry, subentry)],
config_subentry_id=subentry.subentry_id,
)
class AnthropicTaskEntity(
ai_task.AITaskEntity,
AnthropicBaseLLMEntity,
):
"""Anthropic AI Task entity."""
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(chat_log, task.name, task.structure)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)
text = chat_log.content[-1].content or ""
if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with Claude structured response") from err
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)

View File

@@ -2,10 +2,11 @@
from __future__ import annotations
from collections.abc import Mapping
from functools import partial
import json
import logging
from typing import Any
from typing import Any, cast
import anthropic
import voluptuous as vol
@@ -37,7 +38,6 @@ from homeassistant.helpers.selector import (
SelectSelectorConfig,
TemplateSelector,
)
from homeassistant.helpers.typing import VolDictType
from .const import (
CONF_CHAT_MODEL,
@@ -53,10 +53,8 @@ from .const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
@@ -75,16 +73,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
RECOMMENDED_CONVERSATION_OPTIONS = {
RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
RECOMMENDED_AI_TASK_OPTIONS = {
CONF_RECOMMENDED: True,
}
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect.
@@ -107,7 +101,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
errors = {}
if user_input is not None:
self._async_abort_entries_match(user_input)
@@ -135,16 +129,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
subentries=[
{
"subentry_type": "conversation",
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"data": RECOMMENDED_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"subentry_type": "ai_task_data",
"data": RECOMMENDED_AI_TASK_OPTIONS,
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
}
],
)
@@ -158,240 +146,101 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {
"conversation": ConversationSubentryFlowHandler,
"ai_task_data": ConversationSubentryFlowHandler,
}
return {"conversation": ConversationSubentryFlowHandler}
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
"""Flow for managing conversation subentries."""
options: dict[str, Any]
last_rendered_recommended = False
@property
def _is_new(self) -> bool:
"""Return if this is a new subentry."""
return self.source == "user"
async def async_step_user(
async def async_step_set_options(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Add a subentry."""
if self._subentry_type == "ai_task_data":
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
else:
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
return await self.async_step_init()
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle reconfiguration of a subentry."""
self.options = self._get_reconfigure_subentry().data.copy()
return await self.async_step_init()
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Set initial options."""
"""Set conversation options."""
# abort if entry is not loaded
if self._get_entry().state != ConfigEntryState.LOADED:
return self.async_abort(reason="entry_not_loaded")
hass_apis: list[SelectOptionDict] = [
SelectOptionDict(
label=api.name,
value=api.id,
)
for api in llm.async_get_apis(self.hass)
]
if (suggested_llm_apis := self.options.get(CONF_LLM_HASS_API)) and isinstance(
suggested_llm_apis, str
):
self.options[CONF_LLM_HASS_API] = [suggested_llm_apis]
step_schema: VolDictType = {}
errors: dict[str, str] = {}
if self._is_new:
if self._subentry_type == "ai_task_data":
default_name = DEFAULT_AI_TASK_NAME
if user_input is None:
if self._is_new:
options = RECOMMENDED_OPTIONS.copy()
else:
default_name = DEFAULT_CONVERSATION_NAME
step_schema[vol.Required(CONF_NAME, default=default_name)] = str
# If this is a reconfiguration, we need to copy the existing options
# so that we can show the current values in the form.
options = self._get_reconfigure_subentry().data.copy()
if self._subentry_type == "conversation":
step_schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
}
self.last_rendered_recommended = cast(
bool, options.get(CONF_RECOMMENDED, False)
)
step_schema[
vol.Required(
CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
)
] = bool
if user_input is not None:
elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if not user_input.get(CONF_LLM_HASS_API):
user_input.pop(CONF_LLM_HASS_API, None)
if user_input[CONF_RECOMMENDED]:
if not errors:
if self._is_new:
return self.async_create_entry(
title=user_input.pop(CONF_NAME),
data=user_input,
)
return self.async_update_and_abort(
self._get_entry(),
self._get_reconfigure_subentry(),
data=user_input,
)
else:
self.options.update(user_input)
if (
CONF_LLM_HASS_API in self.options
and CONF_LLM_HASS_API not in user_input
):
self.options.pop(CONF_LLM_HASS_API)
if not errors:
return await self.async_step_advanced()
return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(step_schema), self.options
),
errors=errors or None,
)
async def async_step_advanced(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Manage advanced options."""
errors: dict[str, str] = {}
step_schema: VolDictType = {
vol.Optional(
CONF_CHAT_MODEL,
default=RECOMMENDED_CHAT_MODEL,
): str,
vol.Optional(
CONF_MAX_TOKENS,
default=RECOMMENDED_MAX_TOKENS,
): int,
vol.Optional(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
}
if user_input is not None:
self.options.update(user_input)
if not errors:
return await self.async_step_model()
return self.async_show_form(
step_id="advanced",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(step_schema), self.options
),
errors=errors,
)
async def async_step_model(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Manage model-specific options."""
errors: dict[str, str] = {}
step_schema: VolDictType = {}
model = self.options[CONF_CHAT_MODEL]
if not model.startswith(tuple(NON_THINKING_MODELS)):
step_schema[
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
] = vol.All(
NumberSelector(
NumberSelectorConfig(
min=0,
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
)
),
vol.Coerce(int),
)
else:
self.options.pop(CONF_THINKING_BUDGET, None)
if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
step_schema.update(
{
vol.Optional(
CONF_WEB_SEARCH,
default=RECOMMENDED_WEB_SEARCH,
): bool,
vol.Optional(
CONF_WEB_SEARCH_MAX_USES,
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
): int,
vol.Optional(
CONF_WEB_SEARCH_USER_LOCATION,
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
): bool,
}
)
else:
self.options.pop(CONF_WEB_SEARCH, None)
self.options.pop(CONF_WEB_SEARCH_MAX_USES, None)
self.options.pop(CONF_WEB_SEARCH_USER_LOCATION, None)
self.options.pop(CONF_WEB_SEARCH_CITY, None)
self.options.pop(CONF_WEB_SEARCH_REGION, None)
self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
if not step_schema:
user_input = {}
if user_input is not None:
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
if user_input.get(
if user_input.get(
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
elif user_input.get(
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
):
user_input.update(await self._get_location_data())
self.options.update(user_input)
if not errors:
if self._is_new:
return self.async_create_entry(
title=self.options.pop(CONF_NAME),
data=self.options,
title=user_input.pop(CONF_NAME),
data=user_input,
)
return self.async_update_and_abort(
self._get_entry(),
self._get_reconfigure_subentry(),
data=self.options,
data=user_input,
)
return self.async_show_form(
step_id="model",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(step_schema), self.options
options = user_input
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
else:
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
}
suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
if (
suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
) and isinstance(suggested_llm_apis, str):
suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
schema = self.add_suggested_values_to_schema(
vol.Schema(
anthropic_config_option_schema(self.hass, self._is_new, options)
),
suggested_values,
)
return self.async_show_form(
step_id="set_options",
data_schema=schema,
errors=errors or None,
last_step=True,
)
async def _get_location_data(self) -> dict[str, str]:
@@ -455,3 +304,77 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
_LOGGER.debug("Location data: %s", location_data)
return location_data
async_step_user = async_step_set_options
async_step_reconfigure = async_step_set_options
def anthropic_config_option_schema(
hass: HomeAssistant,
is_new: bool,
options: Mapping[str, Any],
) -> dict:
"""Return a schema for Anthropic completion options."""
hass_apis: list[SelectOptionDict] = [
SelectOptionDict(
label=api.name,
value=api.id,
)
for api in llm.async_get_apis(hass)
]
if is_new:
schema: dict[vol.Required | vol.Optional, Any] = {
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
}
else:
schema = {}
schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
vol.Required(
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
): bool,
}
)
if options.get(CONF_RECOMMENDED):
return schema
schema.update(
{
vol.Optional(
CONF_CHAT_MODEL,
default=RECOMMENDED_CHAT_MODEL,
): str,
vol.Optional(
CONF_MAX_TOKENS,
default=RECOMMENDED_MAX_TOKENS,
): int,
vol.Optional(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_THINKING_BUDGET,
default=RECOMMENDED_THINKING_BUDGET,
): int,
vol.Optional(
CONF_WEB_SEARCH,
default=RECOMMENDED_WEB_SEARCH,
): bool,
vol.Optional(
CONF_WEB_SEARCH_MAX_USES,
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
): int,
vol.Optional(
CONF_WEB_SEARCH_USER_LOCATION,
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
): bool,
}
)
return schema
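For orientation, a minimal sketch of how the recommended-settings toggle shapes the schema returned above (illustrative only; `hass` is assumed to be a running Home Assistant instance and the constants come from this module):

# Sketch, not part of the integration: compare the two branches of the helper.
basic = anthropic_config_option_schema(hass, is_new=False, options={CONF_RECOMMENDED: True})
full = anthropic_config_option_schema(hass, is_new=False, options={CONF_RECOMMENDED: False})
# Only the non-recommended branch exposes advanced keys such as CONF_CHAT_MODEL.
assert CONF_CHAT_MODEL not in {str(key) for key in basic}
assert CONF_CHAT_MODEL in {str(key) for key in full}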

View File

@@ -6,7 +6,6 @@ DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)
DEFAULT_CONVERSATION_NAME = "Claude conversation"
DEFAULT_AI_TASK_NAME = "Claude AI Task"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"

View File

@@ -1,24 +1,17 @@
"""Base entity for Anthropic."""
import base64
from collections.abc import AsyncGenerator, Callable, Iterable
from dataclasses import dataclass, field
import json
from mimetypes import guess_file_type
from pathlib import Path
from typing import Any
import anthropic
from anthropic import AsyncStream
from anthropic.types import (
Base64ImageSourceParam,
Base64PDFSourceParam,
CitationsDelta,
CitationsWebSearchResultLocation,
CitationWebSearchResultLocationParam,
ContentBlockParam,
DocumentBlockParam,
ImageBlockParam,
InputJSONDelta,
MessageDeltaUsage,
MessageParam,
@@ -44,9 +37,6 @@ from anthropic.types import (
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolChoiceAnyParam,
ToolChoiceAutoParam,
ToolChoiceToolParam,
ToolParam,
ToolResultBlockParam,
ToolUnionParam,
@@ -60,16 +50,13 @@ from anthropic.types import (
WebSearchToolResultError,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from . import AnthropicConfigEntry
from .const import (
@@ -334,7 +321,6 @@ def _convert_content(
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: AsyncStream[MessageStreamEvent],
output_tool: str | None = None,
) -> AsyncGenerator[
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
@@ -395,16 +381,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
input="",
)
current_tool_args = ""
if response.content_block.name == output_tool:
if first_block or content_details.has_content():
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {"role": "assistant"}
has_native = False
first_block = False
elif isinstance(response.content_block, TextBlock):
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
first_block
@@ -495,16 +471,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
first_block = True
elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta):
if (
current_tool_block is not None
and current_tool_block["name"] == output_tool
):
content_details.citation_details[-1].length += len(
response.delta.partial_json
)
yield {"content": response.delta.partial_json}
else:
current_tool_args += response.delta.partial_json
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
content_details.citation_details[-1].length += len(response.delta.text)
yield {"content": response.delta.text}
@@ -523,9 +490,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
content_details.add_citation(response.delta.citation)
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_block is not None:
if current_tool_block["name"] == output_tool:
current_tool_block = None
continue
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_tool_block["input"] = tool_args
yield {
@@ -593,8 +557,6 @@ class AnthropicBaseLLMEntity(Entity):
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure_name: str | None = None,
structure: vol.Schema | None = None,
) -> None:
"""Generate an answer for the chat log."""
options = self.subentry.data
@@ -651,74 +613,6 @@ class AnthropicBaseLLMEntity(Entity):
}
tools.append(web_search)
# Handle attachments by adding them to the last user message
last_content = chat_log.content[-1]
if last_content.role == "user" and last_content.attachments:
last_message = messages[-1]
if last_message["role"] != "user":
raise HomeAssistantError(
"Last message must be a user message to add attachments"
)
if isinstance(last_message["content"], str):
last_message["content"] = [
TextBlockParam(type="text", text=last_message["content"])
]
last_message["content"].extend( # type: ignore[union-attr]
await async_prepare_files_for_prompt(
self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
)
)
if structure and structure_name:
structure_name = slugify(structure_name)
if model_args["thinking"]["type"] == "disabled":
if not tools:
# Simplest case: no tools and no extended thinking
# Add a tool and force its use
model_args["tool_choice"] = ToolChoiceToolParam(
type="tool",
name=structure_name,
)
else:
# Second case: tools present but no extended thinking
# Allow the model to use any tool but not text response
# The model should know to use the right tool by its description
model_args["tool_choice"] = ToolChoiceAnyParam(
type="any",
)
else:
# Extended thinking is enabled. With extended thinking, we cannot
# force tool use or disable text responses, so we add a hint to the
# system prompt instead. With extended thinking, the model should be
# smart enough to use the tool.
model_args["tool_choice"] = ToolChoiceAutoParam(
type="auto",
)
if isinstance(model_args["system"], str):
model_args["system"] = [
TextBlockParam(type="text", text=model_args["system"])
]
model_args["system"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
)
)
tools.append(
ToolParam(
name=structure_name,
description="Use this tool to reply to the user",
input_schema=convert(
structure,
custom_serializer=chat_log.llm_api.custom_serializer
if chat_log.llm_api
else llm.selector_serializer,
),
)
)
if tools:
model_args["tools"] = tools
@@ -735,11 +629,7 @@ class AnthropicBaseLLMEntity(Entity):
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(
chat_log,
stream,
output_tool=structure_name if structure else None,
),
_transform_stream(chat_log, stream),
)
]
)
@@ -751,59 +641,3 @@ class AnthropicBaseLLMEntity(Entity):
if not chat_log.unresponded_tool_results:
break
async def async_prepare_files_for_prompt(
hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
"""Append files to a prompt.
Caller needs to ensure that the files are allowed.
"""
def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
content: list[ImageBlockParam | DocumentBlockParam] = []
for file_path, mime_type in files:
if not file_path.exists():
raise HomeAssistantError(f"`{file_path}` does not exist")
if mime_type is None:
mime_type = guess_file_type(file_path)[0]
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
raise HomeAssistantError(
"Only images and PDF are supported by the Anthropic API,"
f"`{file_path}` is not an image file or PDF"
)
if mime_type == "image/jpg":
mime_type = "image/jpeg"
base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
if mime_type.startswith("image/"):
content.append(
ImageBlockParam(
type="image",
source=Base64ImageSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
elif mime_type.startswith("application/pdf"):
content.append(
DocumentBlockParam(
type="document",
source=Base64PDFSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
return content
return await hass.async_add_executor_job(append_files_to_content)
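A hedged usage sketch of async_prepare_files_for_prompt as defined above; the path, MIME type, and the messages list are illustrative assumptions mirroring the removed attachment-handling block:

from pathlib import Path

# Sketch only: `hass` is assumed, and the caller must have verified the file is allowed.
blocks = await async_prepare_files_for_prompt(
    hass, [(Path("/config/www/snapshot.jpg"), "image/jpeg")]
)
messages[-1]["content"].extend(blocks)  # appended to the last user message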

View File

@@ -18,94 +18,43 @@
}
},
"config_subentries": {
"ai_task_data": {
"abort": {
"entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "AI task",
"initiate_flow": {
"reconfigure": "Reconfigure AI task",
"user": "Add AI task"
},
"step": {
"advanced": {
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
},
"init": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
},
"model": {
"data": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
},
"data_description": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
}
}
},
"conversation": {
"abort": {
"entry_not_loaded": "Cannot add things while the configuration is disabled.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "Conversation agent",
"error": {
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
"web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
},
"initiate_flow": {
"reconfigure": "Reconfigure conversation agent",
"user": "Add conversation agent"
},
"step": {
"advanced": {
"set_options": {
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature"
},
"title": "Advanced settings"
},
"init": {
"data": {
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"max_tokens": "Maximum tokens to return in response",
"name": "[%key:common::config_flow::data::name%]",
"prompt": "[%key:common::config_flow::data::prompt%]",
"recommended": "Recommended model settings"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
},
"title": "Basic settings"
},
"model": {
"data": {
"recommended": "Recommended model settings",
"temperature": "Temperature",
"thinking_budget": "Thinking budget",
"user_location": "Include home location",
"web_search": "Enable web search",
"web_search_max_uses": "Maximum web searches"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template.",
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
"user_location": "Localize search results based on home location",
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
"web_search_max_uses": "Limit the number of searches performed per response"
},
"title": "Model-specific options"
}
}
}
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0"]
"requirements": ["hassil==3.3.0"]
}

View File

@@ -98,27 +98,5 @@
"name": "Start conversation"
}
},
"title": "Assist satellite",
"triggers": {
"listening": {
"description": "Triggers when a satellite starts listening for a command.",
"description_configured": "Triggers when a satellite starts listening for a command",
"name": "When a satellite starts listening"
},
"processing": {
"description": "Triggers when a satellite starts processing a command.",
"description_configured": "Triggers when a satellite starts processing a command",
"name": "When a satellite starts processing"
},
"responding": {
"description": "Triggers when a satellite starts responding to a command.",
"description_configured": "Triggers when a satellite starts responding to a command",
"name": "When a satellite starts responding"
},
"idle": {
"description": "Triggers when a satellite goes back to idle.",
"description_configured": "Triggers when a satellite goes back to idle",
"name": "When a satellite goes back to idle"
}
}
"title": "Assist satellite"
}

View File

@@ -1,140 +0,0 @@
"""Provides triggers for assist satellites."""
from typing import TYPE_CHECKING, cast, override
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_TARGET,
STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import process_state_match
from homeassistant.helpers.target import (
TargetStateChangedData,
async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
STATE_TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
class StateTriggerBase(Trigger):
"""Trigger for assist satellite state changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, STATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig, state: str) -> None:
"""Initialize the state trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.target is not None
self._target = config.target
self._state = state
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
match_config_state = process_state_match(self._state)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if the new state matches the trigger state
if not match_config_state(to_state.state):
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"{entity_id} {self._state}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ListeningTrigger(StateTriggerBase):
"""Trigger for when a satellite starts listening."""
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the listening trigger."""
super().__init__(hass, config, "listening")
class ProcessingTrigger(StateTriggerBase):
"""Trigger for when a satellite starts processing."""
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the processing trigger."""
super().__init__(hass, config, "processing")
class RespondingTrigger(StateTriggerBase):
"""Trigger for when a satellite starts responding."""
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the responding trigger."""
super().__init__(hass, config, "responding")
class IdleTrigger(StateTriggerBase):
"""Trigger for when a satellite goes back to idle."""
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the idle trigger."""
super().__init__(hass, config, "idle")
TRIGGERS: dict[str, type[Trigger]] = {
"listening": ListeningTrigger,
"processing": ProcessingTrigger,
"responding": RespondingTrigger,
"idle": IdleTrigger,
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for assist satellites."""
return TRIGGERS
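The registry above makes the pattern easy to extend; a hypothetical sketch (the "muted" state does not exist in this integration) would follow the same shape as the classes above:

class MutedTrigger(StateTriggerBase):
    """Hypothetical trigger for an imagined 'muted' satellite state."""

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the hypothetical muted trigger."""
        super().__init__(hass, config, "muted")

TRIGGERS["muted"] = MutedTrigger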

View File

@@ -1,19 +0,0 @@
listening:
target:
entity:
domain: assist_satellite
processing:
target:
entity:
domain: assist_satellite
responding:
target:
entity:
domain: assist_satellite
idle:
target:
entity:
domain: assist_satellite

View File

@@ -14,11 +14,10 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
from homeassistant.helpers import (
config_entry_oauth2_flow,
device_registry as dr,
issue_registry as ir,
)
from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
@@ -38,10 +37,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo
session = async_create_august_clientsession(hass)
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
except ValueError as err:
raise ConfigEntryNotReady("OAuth implementation not available") from err
oauth_session = OAuth2Session(hass, entry, implementation)
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
try:
await async_setup_august(hass, entry, august_gateway)

View File

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["avea"],
"quality_scale": "legacy",
"requirements": ["avea==1.6.1"]
"requirements": ["avea==1.5.1"]
}

View File

@@ -1,116 +0,0 @@
"""The Backblaze B2 integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.event import async_track_time_interval
from .const import (
BACKBLAZE_REALM,
CONF_APPLICATION_KEY,
CONF_BUCKET,
CONF_KEY_ID,
DATA_BACKUP_AGENT_LISTENERS,
DOMAIN,
)
from .repairs import (
async_check_for_repair_issues,
create_bucket_access_restricted_issue,
create_bucket_not_found_issue,
)
_LOGGER = logging.getLogger(__name__)
type BackblazeConfigEntry = ConfigEntry[Bucket]
async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
"""Set up Backblaze B2 from a config entry."""
info = InMemoryAccountInfo()
b2_api = B2Api(info)
def _authorize_and_get_bucket_sync() -> Bucket:
"""Synchronously authorize the Backblaze B2 account and retrieve the bucket.
This function runs in the event loop's executor as b2sdk operations are blocking.
"""
b2_api.authorize_account(
BACKBLAZE_REALM,
entry.data[CONF_KEY_ID],
entry.data[CONF_APPLICATION_KEY],
)
return b2_api.get_bucket_by_name(entry.data[CONF_BUCKET])
try:
bucket = await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
except exception.Unauthorized as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_credentials",
) from err
except exception.RestrictedBucket as err:
create_bucket_access_restricted_issue(hass, entry, err.bucket_name)
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="restricted_bucket",
translation_placeholders={
"restricted_bucket_name": err.bucket_name,
},
) from err
except exception.NonExistentBucket as err:
create_bucket_not_found_issue(hass, entry, entry.data[CONF_BUCKET])
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="invalid_bucket_name",
) from err
except exception.ConnectionReset as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
except exception.MissingAccountData as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
) from err
entry.runtime_data = bucket
def _async_notify_backup_listeners() -> None:
"""Notify any registered backup agent listeners."""
_LOGGER.debug("Notifying backup listeners for entry %s", entry.entry_id)
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()
entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))
async def _periodic_issue_check(_now: Any) -> None:
"""Periodically check for repair issues."""
await async_check_for_repair_issues(hass, entry)
entry.async_on_unload(
async_track_time_interval(hass, _periodic_issue_check, timedelta(minutes=30))
)
hass.async_create_task(async_check_for_repair_issues(hass, entry))
return True
async def async_unload_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
"""Unload a Backblaze B2 config entry.
Any resources directly managed by this entry that need explicit shutdown
would be handled here. In this case, the `async_on_state_change` listener
handles the notification logic on unload.
"""
return True

View File

@@ -1,615 +0,0 @@
"""Backup platform for the Backblaze B2 integration."""
import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
import functools
import json
import logging
import mimetypes
from time import time
from typing import Any
from b2sdk.v2 import FileVersion
from b2sdk.v2.exception import B2Error
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import BackblazeConfigEntry
from .const import (
CONF_PREFIX,
DATA_BACKUP_AGENT_LISTENERS,
DOMAIN,
METADATA_FILE_SUFFIX,
METADATA_VERSION,
)
_LOGGER = logging.getLogger(__name__)
# Cache TTL for backup list (in seconds)
CACHE_TTL = 300
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
base_name = suggested_filename(backup).rsplit(".", 1)[0]
return f"{base_name}.tar", f"{base_name}.metadata.json"
def _parse_metadata(raw_content: str) -> dict[str, Any]:
"""Parse metadata content from JSON."""
try:
data = json.loads(raw_content)
except json.JSONDecodeError as err:
raise ValueError(f"Invalid JSON format: {err}") from err
else:
if not isinstance(data, dict):
raise TypeError("JSON content is not a dictionary")
return data
def _find_backup_file_for_metadata(
metadata_filename: str, all_files: dict[str, FileVersion], prefix: str
) -> FileVersion | None:
"""Find corresponding backup file for metadata file."""
base_name = metadata_filename[len(prefix) :].removesuffix(METADATA_FILE_SUFFIX)
return next(
(
file
for name, file in all_files.items()
if name.startswith(prefix + base_name)
and name.endswith(".tar")
and name != metadata_filename
),
None,
)
def _create_backup_from_metadata(
metadata_content: dict[str, Any], backup_file: FileVersion
) -> AgentBackup:
"""Construct an AgentBackup from parsed metadata content and the associated backup file."""
metadata = metadata_content["backup_metadata"]
metadata["size"] = backup_file.size
return AgentBackup.from_dict(metadata)
def handle_b2_errors[T](
func: Callable[..., Coroutine[Any, Any, T]],
) -> Callable[..., Coroutine[Any, Any, T]]:
"""Handle B2Errors by converting them to BackupAgentError."""
@functools.wraps(func)
async def wrapper(*args: Any, **kwargs: Any) -> T:
"""Catch B2Error and raise BackupAgentError."""
try:
return await func(*args, **kwargs)
except B2Error as err:
error_msg = f"Failed during {func.__name__}"
raise BackupAgentError(error_msg) from err
return wrapper
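A minimal, hypothetical usage sketch of the decorator above (the coroutine and its arguments are illustrative, not part of this integration):

@handle_b2_errors
async def list_file_names(hass, bucket) -> list[str]:
    """Any B2Error raised here is re-raised as BackupAgentError."""
    return await hass.async_add_executor_job(
        lambda: [file.file_name for file, _ in bucket.ls("")]
    )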
async def async_get_backup_agents(
hass: HomeAssistant,
) -> list[BackupAgent]:
"""Return a list of backup agents for all configured Backblaze B2 entries."""
entries: list[BackblazeConfigEntry] = hass.config_entries.async_loaded_entries(
DOMAIN
)
return [BackblazeBackupAgent(hass, entry) for entry in entries]
@callback
def async_register_backup_agents_listener(
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when backup agents are added or removed.
:return: A function to unregister the listener.
"""
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
@callback
def remove_listener() -> None:
"""Remove the listener."""
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
hass.data.pop(DATA_BACKUP_AGENT_LISTENERS, None)
return remove_listener
class BackblazeBackupAgent(BackupAgent):
"""Backup agent for Backblaze B2 cloud storage."""
domain = DOMAIN
def __init__(self, hass: HomeAssistant, entry: BackblazeConfigEntry) -> None:
"""Initialize the Backblaze B2 agent."""
super().__init__()
self._hass = hass
self._bucket = entry.runtime_data
self._prefix = entry.data[CONF_PREFIX]
self.name = entry.title
self.unique_id = entry.entry_id
self._all_files_cache: dict[str, FileVersion] = {}
self._all_files_cache_expiration: float = 0.0
self._backup_list_cache: dict[str, AgentBackup] = {}
self._backup_list_cache_expiration: float = 0.0
self._all_files_cache_lock = asyncio.Lock()
self._backup_list_cache_lock = asyncio.Lock()
def _is_cache_valid(self, expiration_time: float) -> bool:
"""Check if cache is still valid based on expiration time."""
return time() <= expiration_time
async def _cleanup_failed_upload(self, filename: str) -> None:
"""Clean up a partially uploaded file after upload failure."""
_LOGGER.warning(
"Attempting to delete partially uploaded main backup file %s "
"due to metadata upload failure",
filename,
)
try:
uploaded_main_file_info = await self._hass.async_add_executor_job(
self._bucket.get_file_info_by_name, filename
)
await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
except B2Error:
_LOGGER.debug(
"Failed to clean up partially uploaded main backup file %s. "
"Manual intervention may be required to delete it from Backblaze B2",
filename,
exc_info=True,
)
else:
_LOGGER.debug(
"Successfully deleted partially uploaded main backup file %s", filename
)
async def _get_file_for_download(self, backup_id: str) -> FileVersion:
"""Get backup file for download, raising if not found."""
file, _ = await self._find_file_and_metadata_version_by_id(backup_id)
if not file:
raise BackupNotFound(f"Backup {backup_id} not found")
return file
@handle_b2_errors
async def async_download_backup(
self, backup_id: str, **kwargs: Any
) -> AsyncIterator[bytes]:
"""Download a backup from Backblaze B2."""
file = await self._get_file_for_download(backup_id)
_LOGGER.debug("Downloading %s", file.file_name)
downloaded_file = await self._hass.async_add_executor_job(file.download)
response = downloaded_file.response
async def stream_response() -> AsyncIterator[bytes]:
"""Stream the response into an AsyncIterator."""
try:
iterator = response.iter_content(chunk_size=1024 * 1024)
while True:
chunk = await self._hass.async_add_executor_job(
next, iterator, None
)
if chunk is None:
break
yield chunk
finally:
_LOGGER.debug("Finished streaming download for %s", file.file_name)
return stream_response()
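A hedged sketch of consuming the returned stream (the agent instance, backup ID, and destination path are assumptions; in practice the backup manager drives this):

stream = await agent.async_download_backup("abc123")
with open("/tmp/backup.tar", "wb") as dest:  # blocking write, acceptable for a sketch
    async for chunk in stream:
        dest.write(chunk)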
@handle_b2_errors
async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup to Backblaze B2.
This involves uploading the main backup archive and a separate metadata JSON file.
"""
tar_filename, metadata_filename = suggested_filenames(backup)
prefixed_tar_filename = self._prefix + tar_filename
prefixed_metadata_filename = self._prefix + metadata_filename
metadata_content_bytes = json.dumps(
{
"metadata_version": METADATA_VERSION,
"backup_id": backup.backup_id,
"backup_metadata": backup.as_dict(),
}
).encode("utf-8")
_LOGGER.debug(
"Uploading backup: %s, and metadata: %s",
prefixed_tar_filename,
prefixed_metadata_filename,
)
upload_successful = False
try:
await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
_LOGGER.debug(
"Main backup file upload finished for %s", prefixed_tar_filename
)
_LOGGER.debug("Uploading metadata file: %s", prefixed_metadata_filename)
await self._upload_metadata_file(
metadata_content_bytes, prefixed_metadata_filename
)
_LOGGER.debug(
"Metadata file upload finished for %s", prefixed_metadata_filename
)
upload_successful = True
finally:
if upload_successful:
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
self._invalidate_caches(
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
)
else:
await self._cleanup_failed_upload(prefixed_tar_filename)
def _upload_metadata_file_sync(
self, metadata_content: bytes, filename: str
) -> None:
"""Synchronously upload metadata file to B2."""
self._bucket.upload_bytes(
metadata_content,
filename,
content_type="application/json",
file_info={"metadata_only": "true"},
)
async def _upload_metadata_file(
self, metadata_content: bytes, filename: str
) -> None:
"""Upload metadata file to B2."""
await self._hass.async_add_executor_job(
self._upload_metadata_file_sync,
metadata_content,
filename,
)
def _upload_unbound_stream_sync(
self,
reader: AsyncIteratorReader,
filename: str,
content_type: str,
file_info: dict[str, Any],
) -> FileVersion:
"""Synchronously upload unbound stream to B2."""
return self._bucket.upload_unbound_stream(
reader,
filename,
content_type=content_type,
file_info=file_info,
)
def _download_and_parse_metadata_sync(
self, metadata_file_version: FileVersion
) -> dict[str, Any]:
"""Synchronously download and parse metadata file."""
return _parse_metadata(
metadata_file_version.download().response.content.decode("utf-8")
)
async def _upload_backup_file(
self,
filename: str,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
file_info: dict[str, Any],
) -> None:
"""Upload backup file to B2 using streaming."""
_LOGGER.debug("Starting streaming upload for %s", filename)
stream = await open_stream()
reader = AsyncIteratorReader(self._hass.loop, stream)
_LOGGER.debug("Uploading backup file %s with streaming", filename)
try:
content_type, _ = mimetypes.guess_type(filename)
file_version = await self._hass.async_add_executor_job(
self._upload_unbound_stream_sync,
reader,
filename,
content_type or "application/x-tar",
file_info,
)
finally:
reader.close()
_LOGGER.debug("Successfully uploaded %s (ID: %s)", filename, file_version.id_)
@handle_b2_errors
async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
"""Delete a backup and its associated metadata file from Backblaze B2."""
file, metadata_file = await self._find_file_and_metadata_version_by_id(
backup_id
)
if not file:
raise BackupNotFound(f"Backup {backup_id} not found")
# Invariant: when file is not None, metadata_file is also not None
assert metadata_file is not None
_LOGGER.debug(
"Deleting backup file: %s and metadata file: %s",
file.file_name,
metadata_file.file_name,
)
await self._hass.async_add_executor_job(file.delete)
await self._hass.async_add_executor_job(metadata_file.delete)
self._invalidate_caches(
backup_id,
file.file_name,
metadata_file.file_name,
remove_files=True,
)
@handle_b2_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List all backups by finding their associated metadata files in Backblaze B2."""
async with self._backup_list_cache_lock:
if self._backup_list_cache and self._is_cache_valid(
self._backup_list_cache_expiration
):
_LOGGER.debug("Returning backups from cache")
return list(self._backup_list_cache.values())
_LOGGER.debug(
"Cache expired or empty, fetching all files from B2 to build backup list"
)
all_files_in_prefix = await self._get_all_files_in_prefix()
_LOGGER.debug(
"Files found in prefix '%s': %s",
self._prefix,
list(all_files_in_prefix.keys()),
)
# Process metadata files sequentially to avoid exhausting executor pool
backups = {}
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
backup = await self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
)
if backup:
backups[backup.backup_id] = backup
self._backup_list_cache = backups
self._backup_list_cache_expiration = time() + CACHE_TTL
return list(backups.values())
@handle_b2_errors
async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
"""Get a specific backup by its ID from Backblaze B2."""
if self._backup_list_cache and self._is_cache_valid(
self._backup_list_cache_expiration
):
if backup := self._backup_list_cache.get(backup_id):
_LOGGER.debug("Returning backup %s from cache", backup_id)
return backup
file, metadata_file_version = await self._find_file_and_metadata_version_by_id(
backup_id
)
if not file or not metadata_file_version:
raise BackupNotFound(f"Backup {backup_id} not found")
metadata_content = await self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
)
_LOGGER.debug(
"Successfully retrieved metadata for backup ID %s from file %s",
backup_id,
metadata_file_version.file_name,
)
backup = _create_backup_from_metadata(metadata_content, file)
if self._is_cache_valid(self._backup_list_cache_expiration):
self._backup_list_cache[backup.backup_id] = backup
return backup
async def _find_file_and_metadata_version_by_id(
self, backup_id: str
) -> tuple[FileVersion | None, FileVersion | None]:
"""Find the main backup file and its associated metadata file version by backup ID."""
all_files_in_prefix = await self._get_all_files_in_prefix()
# Process metadata files sequentially to avoid exhausting executor pool
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
(
result_backup_file,
result_metadata_file_version,
) = await self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
)
if result_backup_file and result_metadata_file_version:
return result_backup_file, result_metadata_file_version
_LOGGER.debug("Backup %s not found", backup_id)
return None, None
def _process_metadata_file_for_id_sync(
self,
file_name: str,
file_version: FileVersion,
target_backup_id: str,
all_files_in_prefix: dict[str, FileVersion],
) -> tuple[FileVersion | None, FileVersion | None]:
"""Synchronously process a single metadata file for a specific backup ID.
Called within a thread pool executor.
"""
try:
download_response = file_version.download().response
except B2Error as err:
_LOGGER.warning(
"Failed to download metadata file %s during ID search: %s",
file_name,
err,
)
return None, None
try:
metadata_content = _parse_metadata(
download_response.content.decode("utf-8")
)
except ValueError:
return None, None
if metadata_content["backup_id"] != target_backup_id:
_LOGGER.debug(
"Metadata file %s does not match target backup ID %s",
file_name,
target_backup_id,
)
return None, None
found_backup_file = _find_backup_file_for_metadata(
file_name, all_files_in_prefix, self._prefix
)
if not found_backup_file:
_LOGGER.warning(
"Found metadata file %s for backup ID %s, but no corresponding backup file",
file_name,
target_backup_id,
)
return None, None
_LOGGER.debug(
"Found backup file %s and metadata file %s for ID %s",
found_backup_file.file_name,
file_name,
target_backup_id,
)
return found_backup_file, file_version
async def _get_all_files_in_prefix(self) -> dict[str, FileVersion]:
"""Get all file versions in the configured prefix from Backblaze B2.
Uses a cache to minimize API calls.
This fetches a flat list of all files, including main backups and metadata files.
"""
async with self._all_files_cache_lock:
if self._is_cache_valid(self._all_files_cache_expiration):
_LOGGER.debug("Returning all files from cache")
return self._all_files_cache
_LOGGER.debug("Cache for all files expired or empty, fetching from B2")
all_files_in_prefix = await self._hass.async_add_executor_job(
self._fetch_all_files_in_prefix
)
self._all_files_cache = all_files_in_prefix
self._all_files_cache_expiration = time() + CACHE_TTL
return all_files_in_prefix
def _fetch_all_files_in_prefix(self) -> dict[str, FileVersion]:
"""Fetch all files in the configured prefix from B2."""
all_files: dict[str, FileVersion] = {}
for file, _ in self._bucket.ls(self._prefix):
all_files[file.file_name] = file
return all_files
def _process_metadata_file_sync(
self,
file_name: str,
file_version: FileVersion,
all_files_in_prefix: dict[str, FileVersion],
) -> AgentBackup | None:
"""Synchronously process a single metadata file and return an AgentBackup if valid."""
try:
download_response = file_version.download().response
except B2Error as err:
_LOGGER.warning("Failed to download metadata file %s: %s", file_name, err)
return None
try:
metadata_content = _parse_metadata(
download_response.content.decode("utf-8")
)
except ValueError:
return None
found_backup_file = _find_backup_file_for_metadata(
file_name, all_files_in_prefix, self._prefix
)
if not found_backup_file:
_LOGGER.warning(
"Found metadata file %s but no corresponding backup file",
file_name,
)
return None
_LOGGER.debug(
"Successfully processed metadata file %s for backup ID %s",
file_name,
metadata_content["backup_id"],
)
return _create_backup_from_metadata(metadata_content, found_backup_file)
def _invalidate_caches(
self,
backup_id: str,
tar_filename: str,
metadata_filename: str | None,
*,
remove_files: bool = False,
) -> None:
"""Invalidate caches after upload/deletion operations.
Args:
backup_id: The backup ID to remove from backup cache
tar_filename: The tar filename to remove from files cache
metadata_filename: The metadata filename to remove from files cache
remove_files: If True, remove specific files from cache; if False, expire entire cache
"""
if remove_files:
if self._is_cache_valid(self._all_files_cache_expiration):
self._all_files_cache.pop(tar_filename, None)
if metadata_filename:
self._all_files_cache.pop(metadata_filename, None)
if self._is_cache_valid(self._backup_list_cache_expiration):
self._backup_list_cache.pop(backup_id, None)
else:
# For uploads, we can't easily add new FileVersion objects without API calls,
# so we expire the entire cache for simplicity
self._all_files_cache_expiration = 0.0
self._backup_list_cache_expiration = 0.0

View File

@@ -1,288 +0,0 @@
"""Config flow for the Backblaze B2 integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import (
BACKBLAZE_REALM,
CONF_APPLICATION_KEY,
CONF_BUCKET,
CONF_KEY_ID,
CONF_PREFIX,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
# Constants
REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_KEY_ID): cv.string,
vol.Required(CONF_APPLICATION_KEY): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
vol.Required(CONF_BUCKET): cv.string,
vol.Optional(CONF_PREFIX, default=""): cv.string,
}
)
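An illustrative validation of the schema above with placeholder credentials (the values are fake, not working keys):

# Sketch only: vol.Optional(CONF_PREFIX, default="") fills in "" when the prefix is omitted.
validated = STEP_USER_DATA_SCHEMA(
    {
        CONF_KEY_ID: "0012345abcdef0000000001",
        CONF_APPLICATION_KEY: "K001exampleexampleexample",
        CONF_BUCKET: "homeassistant-backups",
    }
)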
class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Backblaze B2."""
VERSION = 1
reauth_entry: ConfigEntry[Any] | None
def _abort_if_duplicate_credentials(self, user_input: dict[str, Any]) -> None:
"""Abort if credentials already exist in another entry."""
self._async_abort_entries_match(
{
CONF_KEY_ID: user_input[CONF_KEY_ID],
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
}
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initiated by the user."""
errors: dict[str, str] = {}
placeholders: dict[str, str] = {}
if user_input is not None:
self._abort_if_duplicate_credentials(user_input)
errors, placeholders = await self._async_validate_backblaze_connection(
user_input
)
if not errors:
if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
"/"
):
user_input[CONF_PREFIX] += "/"
return self.async_create_entry(
title=user_input[CONF_BUCKET], data=user_input
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
description_placeholders={"brand_name": "Backblaze B2", **placeholders},
)
async def _async_validate_backblaze_connection(
self, user_input: dict[str, Any]
) -> tuple[dict[str, str], dict[str, str]]:
"""Validate Backblaze B2 credentials, bucket, capabilities, and prefix.
Returns a tuple of (errors_dict, placeholders_dict).
"""
errors: dict[str, str] = {}
placeholders: dict[str, str] = {}
info = InMemoryAccountInfo()
b2_api = B2Api(info)
def _authorize_and_get_bucket_sync() -> None:
"""Synchronously authorize the account and get the bucket by name.
This function is run in the executor because b2sdk operations are blocking.
"""
b2_api.authorize_account(
BACKBLAZE_REALM, # Use the defined realm constant
user_input[CONF_KEY_ID],
user_input[CONF_APPLICATION_KEY],
)
b2_api.get_bucket_by_name(user_input[CONF_BUCKET])
try:
await self.hass.async_add_executor_job(_authorize_and_get_bucket_sync)
allowed = b2_api.account_info.get_allowed()
# Check if allowed info is available
if allowed is None or not allowed.get("capabilities"):
errors["base"] = "invalid_capability"
placeholders["missing_capabilities"] = ", ".join(
sorted(REQUIRED_CAPABILITIES)
)
else:
# Check if all required capabilities are present
current_caps = set(allowed["capabilities"])
if not REQUIRED_CAPABILITIES.issubset(current_caps):
missing_caps = REQUIRED_CAPABILITIES - current_caps
_LOGGER.warning(
"Missing required Backblaze B2 capabilities for Key ID '%s': %s",
user_input[CONF_KEY_ID],
", ".join(sorted(missing_caps)),
)
errors["base"] = "invalid_capability"
placeholders["missing_capabilities"] = ", ".join(
sorted(missing_caps)
)
else:
# Only check prefix if capabilities are valid
configured_prefix: str = user_input[CONF_PREFIX]
allowed_prefix = allowed.get("namePrefix") or ""
# Ensure configured prefix starts with Backblaze B2's allowed prefix
if allowed_prefix and not configured_prefix.startswith(
allowed_prefix
):
errors[CONF_PREFIX] = "invalid_prefix"
placeholders["allowed_prefix"] = allowed_prefix
except exception.Unauthorized:
_LOGGER.debug(
"Backblaze B2 authentication failed for Key ID '%s'",
user_input[CONF_KEY_ID],
)
errors["base"] = "invalid_credentials"
except exception.RestrictedBucket as err:
_LOGGER.debug(
"Access to Backblaze B2 bucket '%s' is restricted: %s",
user_input[CONF_BUCKET],
err,
)
placeholders["restricted_bucket_name"] = err.bucket_name
errors[CONF_BUCKET] = "restricted_bucket"
except exception.NonExistentBucket:
_LOGGER.debug(
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
)
errors[CONF_BUCKET] = "invalid_bucket_name"
except exception.ConnectionReset:
_LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
errors["base"] = "cannot_connect"
except exception.MissingAccountData:
# This generally indicates an issue with how InMemoryAccountInfo is used
_LOGGER.error(
"Missing account data during Backblaze B2 authorization for Key ID '%s'",
user_input[CONF_KEY_ID],
)
errors["base"] = "invalid_credentials"
except Exception:
_LOGGER.exception(
"An unexpected error occurred during Backblaze B2 configuration for Key ID '%s'",
user_input[CONF_KEY_ID],
)
errors["base"] = "unknown"
return errors, placeholders
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication flow."""
self.reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
assert self.reauth_entry is not None
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication."""
assert self.reauth_entry is not None
errors: dict[str, str] = {}
placeholders: dict[str, str] = {}
if user_input is not None:
self._abort_if_duplicate_credentials(user_input)
validation_input = {
CONF_KEY_ID: user_input[CONF_KEY_ID],
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
CONF_BUCKET: self.reauth_entry.data[CONF_BUCKET],
CONF_PREFIX: self.reauth_entry.data[CONF_PREFIX],
}
errors, placeholders = await self._async_validate_backblaze_connection(
validation_input
)
if not errors:
return self.async_update_reload_and_abort(
self.reauth_entry,
data_updates={
CONF_KEY_ID: user_input[CONF_KEY_ID],
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_KEY_ID): cv.string,
vol.Required(CONF_APPLICATION_KEY): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
}
),
errors=errors,
description_placeholders={
"brand_name": "Backblaze B2",
"bucket": self.reauth_entry.data[CONF_BUCKET],
**placeholders,
},
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration flow."""
entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
assert entry is not None
if user_input is not None:
self._abort_if_duplicate_credentials(user_input)
errors, placeholders = await self._async_validate_backblaze_connection(
user_input
)
if not errors:
if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
"/"
):
user_input[CONF_PREFIX] += "/"
return self.async_update_reload_and_abort(
entry,
data_updates=user_input,
)
else:
errors = {}
placeholders = {}
return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input or entry.data
),
errors=errors,
description_placeholders={"brand_name": "Backblaze B2", **placeholders},
)

View File

@@ -1,22 +0,0 @@
"""Constants for the Backblaze B2 integration."""
from collections.abc import Callable
from typing import Final
from homeassistant.util.hass_dict import HassKey
DOMAIN: Final = "backblaze_b2"
CONF_KEY_ID = "key_id"
CONF_APPLICATION_KEY = "application_key"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
f"{DOMAIN}.backup_agent_listeners"
)
METADATA_FILE_SUFFIX = ".metadata.json"
METADATA_VERSION = "1"
BACKBLAZE_REALM = "production"

View File

@@ -1,56 +0,0 @@
"""Diagnostics support for Backblaze B2."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from . import BackblazeConfigEntry
from .const import CONF_APPLICATION_KEY, CONF_KEY_ID
TO_REDACT_ENTRY_DATA = {CONF_APPLICATION_KEY, CONF_KEY_ID}
TO_REDACT_ACCOUNT_DATA_ALLOWED = {"bucketId", "bucketName", "namePrefix"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: BackblazeConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
bucket = entry.runtime_data
try:
bucket_info = {
"name": bucket.name,
"id": bucket.id_,
"type": bucket.type_,
"cors_rules": bucket.cors_rules,
"lifecycle_rules": bucket.lifecycle_rules,
"revision": bucket.revision,
}
account_info = bucket.api.account_info
account_data: dict[str, Any] = {
"account_id": account_info.get_account_id(),
"api_url": account_info.get_api_url(),
"download_url": account_info.get_download_url(),
"minimum_part_size": account_info.get_minimum_part_size(),
"allowed": account_info.get_allowed(),
}
if isinstance(account_data["allowed"], dict):
account_data["allowed"] = async_redact_data(
account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
)
except (AttributeError, TypeError, ValueError, KeyError):
bucket_info = {"name": "unknown", "id": "unknown"}
account_data = {"error": "Failed to retrieve detailed account information"}
return {
"entry_data": async_redact_data(entry.data, TO_REDACT_ENTRY_DATA),
"entry_options": entry.options,
"bucket_info": bucket_info,
"account_info": account_data,
}

View File

@@ -1,12 +0,0 @@
{
"domain": "backblaze_b2",
"name": "Backblaze B2",
"codeowners": ["@hugo-vrijswijk", "@ElCruncharino"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/backblaze_b2",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["b2sdk"],
"quality_scale": "bronze",
"requirements": ["b2sdk==2.8.1"]
}

View File

@@ -1,124 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling:
status: exempt
comment: Integration does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not have any custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Entities of this integration do not explicitly subscribe to events.
entity-unique-id:
status: exempt
comment: |
This integration does not have entities.
has-entity-name:
status: exempt
comment: |
This integration does not have entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: This integration does not have entities.
integration-owner: done
log-when-unavailable:
status: exempt
comment: This integration does not have entities.
parallel-updates:
status: exempt
comment: This integration does not poll.
reauthentication-flow: done
test-coverage: done
# Gold
devices:
status: exempt
comment: This integration does not have entities.
diagnostics: done
discovery-update-info:
status: exempt
comment: Backblaze B2 is a cloud service that is not discovered on the network.
discovery:
status: exempt
comment: Backblaze B2 is a cloud service that is not discovered on the network.
docs-data-update:
status: exempt
comment: This integration does not poll.
docs-examples:
status: exempt
comment: The integration extends core functionality and does not require examples.
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: This integration does not support physical devices.
docs-supported-functions:
status: exempt
comment: This integration does not have entities.
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration does not have devices.
entity-category:
status: exempt
comment: This integration does not have entities.
entity-device-class:
status: exempt
comment: This integration does not have entities.
entity-disabled-by-default:
status: exempt
comment: This integration does not have entities.
entity-translations:
status: exempt
comment: This integration does not have entities.
exception-translations: done
icon-translations:
status: exempt
comment: This integration does not use icons.
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt
comment: This integration does not have devices.
# Platinum
async-dependency:
status: exempt
comment: |
The b2sdk library is synchronous by design. All sync operations are properly
wrapped with async_add_executor_job to prevent blocking the event loop.
inject-websession:
status: exempt
comment: |
The b2sdk library does not support custom HTTP session injection.
It manages HTTP connections internally through its own session management.
strict-typing:
status: exempt
comment: |
The b2sdk dependency does not include a py.typed file and is not PEP 561 compliant.
This is outside the integration's control as it's a third-party library requirement.

View File

@@ -1,93 +0,0 @@
"""Repair issues for the Backblaze B2 integration."""
from __future__ import annotations
import logging
from b2sdk.v2.exception import (
B2Error,
NonExistentBucket,
RestrictedBucket,
Unauthorized,
)
from homeassistant.components.repairs import ConfirmRepairFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from .const import CONF_BUCKET, DOMAIN
_LOGGER = logging.getLogger(__name__)
ISSUE_BUCKET_ACCESS_RESTRICTED = "bucket_access_restricted"
ISSUE_BUCKET_NOT_FOUND = "bucket_not_found"
def _create_issue(
hass: HomeAssistant,
entry: ConfigEntry,
issue_type: str,
bucket_name: str,
) -> None:
"""Create a repair issue with standard parameters."""
ir.async_create_issue(
hass,
DOMAIN,
f"{issue_type}_{entry.entry_id}",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.ERROR,
translation_key=issue_type,
translation_placeholders={
"brand_name": "Backblaze B2",
"title": entry.title,
"bucket_name": bucket_name,
"entry_id": entry.entry_id,
},
)
def create_bucket_access_restricted_issue(
hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
"""Create a repair issue for restricted bucket access."""
_create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, bucket_name)
def create_bucket_not_found_issue(
hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
"""Create a repair issue for non-existent bucket."""
_create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, bucket_name)
async def async_check_for_repair_issues(
hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Check for common issues that require user action."""
bucket = entry.runtime_data
restricted_issue_id = f"{ISSUE_BUCKET_ACCESS_RESTRICTED}_{entry.entry_id}"
not_found_issue_id = f"{ISSUE_BUCKET_NOT_FOUND}_{entry.entry_id}"
try:
await hass.async_add_executor_job(bucket.api.account_info.get_allowed)
ir.async_delete_issue(hass, DOMAIN, restricted_issue_id)
ir.async_delete_issue(hass, DOMAIN, not_found_issue_id)
except Unauthorized:
entry.async_start_reauth(hass)
except RestrictedBucket as err:
_create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, err.bucket_name)
except NonExistentBucket:
_create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, entry.data[CONF_BUCKET])
except B2Error as err:
_LOGGER.debug("B2 connectivity test failed: %s", err)
async def async_create_fix_flow(
hass: HomeAssistant,
issue_id: str,
data: dict[str, str | int | float | None] | None,
) -> ConfirmRepairFlow:
"""Create a fix flow for Backblaze B2 issues."""
return ConfirmRepairFlow()
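The deleted module above is self-contained; where it gets invoked is not part of this hunk. A minimal sketch of a plausible call site (placing the check inside async_setup_entry is an assumption for illustration only):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .repairs import async_check_for_repair_issues


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the entry and surface repair issues if the bucket is unreachable."""
    # ... normal setup: validate credentials, store runtime_data, etc.
    await async_check_for_repair_issues(hass, entry)
    return True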

View File

@@ -1,92 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
"invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
"invalid_credentials": "[%key:component::backblaze_b2::exceptions::invalid_credentials::message%]",
"invalid_prefix": "[%key:component::backblaze_b2::exceptions::invalid_prefix::message%]",
"restricted_bucket": "[%key:component::backblaze_b2::exceptions::restricted_bucket::message%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"application_key": "Application key",
"key_id": "Key ID"
},
"data_description": {
"application_key": "Application key to connect to {brand_name}",
"key_id": "Key ID to connect to {brand_name}"
},
"description": "Update your {brand_name} credentials for bucket {bucket}.",
"title": "Reauthenticate {brand_name}"
},
"reconfigure": {
"data": {
"application_key": "Application key",
"bucket": "Bucket name",
"key_id": "Key ID",
"prefix": "Folder prefix (optional)"
},
"data_description": {
"application_key": "Application key to connect to {brand_name}",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"key_id": "Key ID to connect to {brand_name}",
"prefix": "Directory path to store backup files in. Leave empty to store in the root."
},
"title": "Reconfigure {brand_name}"
},
"user": {
"data": {
"application_key": "Application key",
"bucket": "Bucket name",
"key_id": "Key ID",
"prefix": "Folder prefix (optional)"
},
"data_description": {
"application_key": "Application key to connect to {brand_name}",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"key_id": "Key ID to connect to {brand_name}",
"prefix": "Directory path to store backup files in. Leave empty to store in the root."
},
"title": "Add {brand_name} backup"
}
}
},
"exceptions": {
"cannot_connect": {
"message": "Cannot connect to endpoint"
},
"invalid_bucket_name": {
"message": "Bucket does not exist or is not writable by the provided credentials."
},
"invalid_capability": {
"message": "Application key does not have the required read/write capabilities."
},
"invalid_credentials": {
"message": "Bucket cannot be accessed using provided of key ID and application key."
},
"invalid_prefix": {
"message": "Prefix is not allowed for provided key. Must start with {allowed_prefix}."
},
"restricted_bucket": {
"message": "Application key is restricted to bucket {restricted_bucket_name}."
}
},
"issues": {
"bucket_access_restricted": {
"description": "Access to your {brand_name} bucket {bucket_name} is restricted for the current credentials. This means your application key may only have access to specific buckets, but not this one. To fix this issue:\n\n1. Log in to your {brand_name} account\n2. Check your application key restrictions\n3. Either use a different bucket that your key can access, or create a new application key with access to {bucket_name}\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
"title": "{brand_name} bucket access restricted"
},
"bucket_not_found": {
"description": "The {brand_name} bucket {bucket_name} cannot be found or accessed. This could mean:\n\n1. The bucket was deleted\n2. The bucket name was changed\n3. Your credentials no longer have access to this bucket\n\nTo fix this issue:\n\n1. Log in to your {brand_name} account\n2. Verify the bucket still exists and check its name\n3. Ensure your application key has access to this bucket\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
"title": "{brand_name} bucket not found"
}
}
}

View File

@@ -8,6 +8,6 @@
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
"single_config_entry": true
}

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==5.1.0.247.1"],
"requirements": ["mozart-api==4.1.1.116.4"],
"zeroconf": ["_bangolufsen._tcp.local."]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.3.2"]
"requirements": ["bluecurrent-api==1.3.1"]
}

View File

@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)
current_addresses = self._async_current_ids(include_ignore=False)
current_addresses = self._async_current_ids()
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

View File

@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==2.45.0",
"dbus-fast==2.44.5",
"habluetooth==5.7.0"
]
}

View File

@@ -99,12 +99,6 @@ def deserialize_entity_description(
descriptions_class = descriptions_class._dataclass # noqa: SLF001
for field in cached_fields(descriptions_class):
field_name = field.name
# Only set fields that are in the data
# otherwise we would override default values with None
# causing side effects
if field_name not in data:
continue
# It would be nice if field.type returned the actual
# type instead of a str so we could avoid writing this
# out, but it doesn't. If we end up using this in more
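The comment in the hunk above explains why deserialization only copies fields that are present in the payload: otherwise dataclass defaults would be overwritten with None. A standalone illustration of that default-preserving pattern (ExampleDescription is a made-up dataclass, not the real entity description type):

from dataclasses import dataclass, fields


@dataclass(frozen=True)
class ExampleDescription:
    key: str
    icon: str | None = "mdi:bluetooth"  # default that must survive deserialization


def from_dict(data: dict) -> ExampleDescription:
    kwargs = {}
    for field in fields(ExampleDescription):
        # Only copy fields present in the payload; otherwise the dataclass
        # default (the icon above) would be replaced with nothing at all.
        if field.name in data:
            kwargs[field.name] = data[field.name]
    return ExampleDescription(**kwargs)


assert from_dict({"key": "temperature"}).icon == "mdi:bluetooth"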

View File

@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
CONF_COMMUNITY,
@@ -50,15 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
await coordinator.async_config_entry_first_refresh()
if brother.serial.lower() != entry.unique_id:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="serial_mismatch",
translation_placeholders={
"device": entry.title,
},
)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

View File

@@ -13,7 +13,6 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.network import is_host_valid
@@ -22,7 +21,6 @@ from .const import (
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
PRINTER_TYPE_LASER,
PRINTER_TYPES,
SECTION_ADVANCED_SETTINGS,
)
@@ -30,12 +28,7 @@ from .const import (
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
SelectSelectorConfig(
options=PRINTER_TYPES,
translation_key="printer_type",
)
),
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
@@ -49,12 +42,7 @@ DATA_SCHEMA = vol.Schema(
)
ZEROCONF_SCHEMA = vol.Schema(
{
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
SelectSelectorConfig(
options=PRINTER_TYPES,
translation_key="printer_type",
)
),
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{

View File

@@ -7,10 +7,7 @@ from typing import Final
DOMAIN: Final = "brother"
PRINTER_TYPE_LASER = "laser"
PRINTER_TYPE_INK = "ink"
PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
PRINTER_TYPES: Final = ["laser", "ink"]
UPDATE_INTERVAL = timedelta(seconds=30)

View File

@@ -1,30 +0,0 @@
"""Define the Brother entity."""
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BrotherDataUpdateCoordinator
class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
"""Define a Brother Printer entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: BrotherDataUpdateCoordinator,
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{coordinator.brother.host}/",
identifiers={(DOMAIN, coordinator.brother.serial)},
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)

View File

@@ -19,15 +19,13 @@ from homeassistant.components.sensor import (
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
from .entity import BrotherPrinterEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
ATTR_COUNTER = "counter"
ATTR_REMAINING_PAGES = "remaining_pages"
@@ -332,9 +330,12 @@ async def async_setup_entry(
)
class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
"""Define a Brother Printer sensor."""
class BrotherPrinterSensor(
CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
):
"""Define an Brother Printer sensor."""
_attr_has_entity_name = True
entity_description: BrotherSensorEntityDescription
def __init__(
@@ -344,7 +345,16 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{coordinator.brother.host}/",
identifiers={(DOMAIN, coordinator.brother.serial)},
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)
self._attr_native_value = description.value(coordinator.data)
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
self.entity_description = description

View File

@@ -38,11 +38,11 @@
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"type": "Printer type"
"type": "Type of the printer"
},
"data_description": {
"host": "The hostname or IP address of the Brother printer to control.",
"type": "The type of the Brother printer."
"type": "Brother printer type: ink or laser."
},
"sections": {
"advanced_settings": {
@@ -207,19 +207,8 @@
"cannot_connect": {
"message": "An error occurred while connecting to the {device} printer: {error}"
},
"serial_mismatch": {
"message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
},
"update_error": {
"message": "An error occurred while retrieving data from the {device} printer: {error}"
}
},
"selector": {
"printer_type": {
"options": {
"ink": "ink",
"laser": "laser"
}
}
}
}

View File

@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
return HVACAction.HEATING
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_parse_hvac_action",
translation_key="failed_to_parse_hvac_mode",
translation_placeholders={
"mode_and_active": mode_and_active,
"current_temperature": str(self.current_temperature),

View File

@@ -24,7 +24,7 @@
},
"exceptions": {
"failed_to_parse_hvac_action": {
"message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
"message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
},
"failed_to_parse_hvac_mode": {
"message": "Cannot parse response to HVACMode: {mode}"

View File

@@ -63,7 +63,6 @@ BINARY_SENSOR_DESCRIPTIONS = {
),
BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
key=BTHomeBinarySensorDeviceClass.GENERIC,
translation_key="generic",
),
BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
key=BTHomeBinarySensorDeviceClass.LIGHT,
@@ -160,7 +159,10 @@ def sensor_update_to_bluetooth_data_update(
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
entity_names={},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
)

View File

@@ -59,7 +59,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
state_class=SensorStateClass.MEASUREMENT,
translation_key="acceleration",
),
# Battery (percent)
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
@@ -73,7 +72,6 @@ SENSOR_DESCRIPTIONS = {
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
state_class=SensorStateClass.MEASUREMENT,
translation_key="channel",
),
# Conductivity (μS/cm)
(
@@ -89,7 +87,6 @@ SENSOR_DESCRIPTIONS = {
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
key=str(BTHomeSensorDeviceClass.COUNT),
state_class=SensorStateClass.MEASUREMENT,
translation_key="count",
),
# CO2 (parts per million)
(
@@ -117,14 +114,12 @@ SENSOR_DESCRIPTIONS = {
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
translation_key="dew_point",
),
# Directions (°)
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="direction",
),
# Distance (mm)
(
@@ -178,7 +173,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
state_class=SensorStateClass.MEASUREMENT,
translation_key="gyroscope",
),
# Humidity in (percent)
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
@@ -221,7 +215,6 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
translation_key="packet_id",
),
# PM10 (μg/m3)
(
@@ -270,14 +263,12 @@ SENSOR_DESCRIPTIONS = {
# Raw (-)
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.RAW),
translation_key="raw",
),
# Rotation (°)
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="rotation",
),
# Rotational speed (rpm)
(
@@ -287,7 +278,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="rotational_speed",
),
# Signal Strength (RSSI) (dB)
(
@@ -321,7 +311,6 @@ SENSOR_DESCRIPTIONS = {
# Text (-)
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
translation_key="text",
),
# Timestamp (datetime object)
(
@@ -338,7 +327,6 @@ SENSOR_DESCRIPTIONS = {
): SensorEntityDescription(
key=str(BTHomeSensorDeviceClass.UV_INDEX),
state_class=SensorStateClass.MEASUREMENT,
translation_key="uv_index",
),
# Volatile organic Compounds (VOC) (μg/m3)
(
@@ -435,7 +423,10 @@ def sensor_update_to_bluetooth_data_update(
)
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
)

View File

@@ -47,11 +47,6 @@
}
},
"entity": {
"binary_sensor": {
"generic": {
"name": "Generic"
}
},
"event": {
"button": {
"state_attributes": {
@@ -78,44 +73,6 @@
}
}
}
},
"sensor": {
"acceleration": {
"name": "Acceleration"
},
"channel": {
"name": "Channel"
},
"count": {
"name": "Count"
},
"dew_point": {
"name": "Dew point"
},
"direction": {
"name": "Direction"
},
"gyroscope": {
"name": "Gyroscope"
},
"packet_id": {
"name": "Packet ID"
},
"raw": {
"name": "Raw"
},
"rotation": {
"name": "Rotation"
},
"rotational_speed": {
"name": "Rotational speed"
},
"text": {
"name": "Text"
},
"uv_index": {
"name": "UV Index"
}
}
}
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/caldav",
"iot_class": "cloud_polling",
"loggers": ["caldav", "vobject"],
"requirements": ["caldav==2.1.0", "icalendar==6.3.1", "vobject==0.9.9"]
"requirements": ["caldav==1.6.0", "icalendar==6.3.1"]
}

View File

@@ -57,9 +57,9 @@ async def _async_reproduce_states(
await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})
if (
(state.attributes.get(ATTR_TEMPERATURE) is not None)
or (state.attributes.get(ATTR_TARGET_TEMP_HIGH) is not None)
or (state.attributes.get(ATTR_TARGET_TEMP_LOW) is not None)
(ATTR_TEMPERATURE in state.attributes)
or (ATTR_TARGET_TEMP_HIGH in state.attributes)
or (ATTR_TARGET_TEMP_LOW in state.attributes)
):
await call_service(
SERVICE_SET_TEMPERATURE,
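The hunk above swaps between a plain key-presence check and an explicit non-None check; the two only differ when an attribute exists but carries a None value. A quick standalone illustration:

attributes = {"temperature": None, "target_temp_high": 24.0}

# Key-presence check: True even though the stored value is None.
print("temperature" in attributes)                      # True

# Non-None check: only True when a usable value is present.
print(attributes.get("temperature") is not None)        # False
print(attributes.get("target_temp_high") is not None)   # True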

View File

@@ -285,93 +285,5 @@
"name": "[%key:common::action::turn_on%]"
}
},
"title": "Climate",
"triggers": {
"cooling": {
"description": "Triggers when a climate starts cooling.",
"name": "When a climate starts cooling"
},
"current_humidity_changed": {
"description": "Triggers when the current humidity of a climate changes.",
"fields": {
"above": {
"description": "Only trigger when the current humidity goes above this value.",
"name": "Above"
},
"below": {
"description": "Only trigger when the current humidity goes below this value.",
"name": "Below"
}
},
"name": "When current humidity changes"
},
"current_temperature_changed": {
"description": "Triggers when the current temperature of a climate changes.",
"fields": {
"above": {
"description": "Only trigger when the current temperature goes above this value.",
"name": "Above"
},
"below": {
"description": "Only trigger when the current temperature goes below this value.",
"name": "Below"
}
},
"name": "When current temperature changes"
},
"drying": {
"description": "Triggers when a climate starts drying.",
"name": "When a climate starts drying"
},
"heating": {
"description": "Triggers when a climate starts heating.",
"name": "When a climate starts heating"
},
"mode_changed": {
"description": "Triggers when the HVAC mode of a climate changes.",
"fields": {
"hvac_mode": {
"description": "The HVAC modes to trigger on. If empty, triggers on all mode changes.",
"name": "HVAC modes"
}
},
"name": "When HVAC mode changes"
},
"target_humidity_changed": {
"description": "Triggers when the target humidity of a climate changes.",
"fields": {
"above": {
"description": "Only trigger when the target humidity goes above this value.",
"name": "Above"
},
"below": {
"description": "Only trigger when the target humidity goes below this value.",
"name": "Below"
}
},
"name": "When target humidity changes"
},
"target_temperature_changed": {
"description": "Triggers when the target temperature of a climate changes.",
"fields": {
"above": {
"description": "Only trigger when the target temperature goes above this value.",
"name": "Above"
},
"below": {
"description": "Only trigger when the target temperature goes below this value.",
"name": "Below"
}
},
"name": "When target temperature changes"
},
"turns_off": {
"description": "Triggers when a climate turns off.",
"name": "When a climate turns off"
},
"turns_on": {
"description": "Triggers when a climate turns on.",
"name": "When a climate turns on"
}
}
"title": "Climate"
}

View File

@@ -1,817 +0,0 @@
"""Provides triggers for climate."""
from typing import TYPE_CHECKING, cast, override
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
CONF_ABOVE,
CONF_BELOW,
CONF_OPTIONS,
CONF_TARGET,
STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.target import (
TargetStateChangedData,
async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from .const import (
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
DOMAIN,
HVAC_MODES,
HVACMode,
)
CLIMATE_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
MODE_CHANGED_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {
vol.Optional(ATTR_HVAC_MODE, default=[]): vol.All(
cv.ensure_list, [vol.In(HVAC_MODES)]
),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
THRESHOLD_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_OPTIONS, default={}): {
vol.Optional(CONF_ABOVE): vol.Coerce(float),
vol.Optional(CONF_BELOW): vol.Coerce(float),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
class ClimateTurnsOnTrigger(Trigger):
"""Trigger for when a climate turns on."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate turns on trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if climate turned on (from off to any other mode)
if (
from_state is not None
and from_state.state == HVACMode.OFF
and to_state.state != HVACMode.OFF
):
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} turned on",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateTurnsOffTrigger(Trigger):
"""Trigger for when a climate turns off."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate turns off trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if climate turned off (from any mode to off)
if (
from_state is not None
and from_state.state != HVACMode.OFF
and to_state.state == HVACMode.OFF
):
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} turned off",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateModeChangedTrigger(Trigger):
"""Trigger for when a climate mode changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, MODE_CHANGED_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate mode changed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
hvac_modes_filter = self._options[ATTR_HVAC_MODE]
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if hvac_mode changed
if from_state is not None and from_state.state != to_state.state:
# If hvac_modes filter is specified, check if the new mode matches
if hvac_modes_filter and to_state.state not in hvac_modes_filter:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} mode changed to {to_state.state}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateCoolingTrigger(Trigger):
"""Trigger for when a climate starts cooling."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate cooling trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if climate started cooling
from_action = from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
to_action = to_state.attributes.get(ATTR_HVAC_ACTION)
if from_action != "cooling" and to_action == "cooling":
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} started cooling",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateHeatingTrigger(Trigger):
"""Trigger for when a climate starts heating."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate heating trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if climate started heating
from_action = from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
to_action = to_state.attributes.get(ATTR_HVAC_ACTION)
if from_action != "heating" and to_action == "heating":
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} started heating",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateDryingTrigger(Trigger):
"""Trigger for when a climate starts drying."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate drying trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if climate started drying
from_action = from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
to_action = to_state.attributes.get(ATTR_HVAC_ACTION)
if from_action != "drying" and to_action == "drying":
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} started drying",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateTargetTemperatureChangedTrigger(Trigger):
"""Trigger for when a climate target temperature changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate target temperature changed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
above = self._options.get(CONF_ABOVE)
below = self._options.get(CONF_BELOW)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if target temperature changed
from_temp = (
from_state.attributes.get(ATTR_TEMPERATURE) if from_state else None
)
to_temp = to_state.attributes.get(ATTR_TEMPERATURE)
if to_temp is None or from_temp == to_temp:
return
# Apply threshold filters if specified
if above is not None and to_temp <= above:
return
if below is not None and to_temp >= below:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} target temperature changed to {to_temp}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateCurrentTemperatureChangedTrigger(Trigger):
"""Trigger for when a climate current temperature changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate current temperature changed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
above = self._options.get(CONF_ABOVE)
below = self._options.get(CONF_BELOW)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if current temperature changed
from_temp = (
from_state.attributes.get(ATTR_CURRENT_TEMPERATURE)
if from_state
else None
)
to_temp = to_state.attributes.get(ATTR_CURRENT_TEMPERATURE)
if to_temp is None or from_temp == to_temp:
return
# Apply threshold filters if specified
if above is not None and to_temp <= above:
return
if below is not None and to_temp >= below:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} current temperature changed to {to_temp}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateTargetHumidityChangedTrigger(Trigger):
"""Trigger for when a climate target humidity changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate target humidity changed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
above = self._options.get(CONF_ABOVE)
below = self._options.get(CONF_BELOW)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if target humidity changed
from_humidity = (
from_state.attributes.get(ATTR_HUMIDITY) if from_state else None
)
to_humidity = to_state.attributes.get(ATTR_HUMIDITY)
if to_humidity is None or from_humidity == to_humidity:
return
# Apply threshold filters if specified
if above is not None and to_humidity <= above:
return
if below is not None and to_humidity >= below:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} target humidity changed to {to_humidity}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
class ClimateCurrentHumidityChangedTrigger(Trigger):
"""Trigger for when a climate current humidity changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the climate current humidity changed trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target
@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
above = self._options.get(CONF_ABOVE)
below = self._options.get(CONF_BELOW)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
# Ignore unavailable states
if to_state is None or to_state.state == STATE_UNAVAILABLE:
return
# Check if current humidity changed
from_humidity = (
from_state.attributes.get(ATTR_CURRENT_HUMIDITY)
if from_state
else None
)
to_humidity = to_state.attributes.get(ATTR_CURRENT_HUMIDITY)
if to_humidity is None or from_humidity == to_humidity:
return
# Apply threshold filters if specified
if above is not None and to_humidity <= above:
return
if below is not None and to_humidity >= below:
return
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"climate {entity_id} current humidity changed to {to_humidity}",
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)
TRIGGERS: dict[str, type[Trigger]] = {
"turns_on": ClimateTurnsOnTrigger,
"turns_off": ClimateTurnsOffTrigger,
"mode_changed": ClimateModeChangedTrigger,
"cooling": ClimateCoolingTrigger,
"heating": ClimateHeatingTrigger,
"drying": ClimateDryingTrigger,
"target_temperature_changed": ClimateTargetTemperatureChangedTrigger,
"current_temperature_changed": ClimateCurrentTemperatureChangedTrigger,
"target_humidity_changed": ClimateTargetHumidityChangedTrigger,
"current_humidity_changed": ClimateCurrentHumidityChangedTrigger,
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for climate."""
return TRIGGERS
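All of the threshold triggers above apply the same above/below filter to the new value before running the action; the rule in isolation:

def passes_threshold(value: float, above: float | None, below: float | None) -> bool:
    """Mirror the filter used by the temperature/humidity triggers."""
    if above is not None and value <= above:
        return False
    if below is not None and value >= below:
        return False
    return True


assert passes_threshold(21.5, above=20.0, below=None)
assert not passes_threshold(19.0, above=20.0, below=None)
assert not passes_threshold(25.0, above=20.0, below=24.0)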

View File

@@ -1,128 +0,0 @@
turns_on:
target:
entity:
domain: climate
turns_off:
target:
entity:
domain: climate
mode_changed:
target:
entity:
domain: climate
fields:
hvac_mode:
required: false
default: []
selector:
select:
multiple: true
mode: dropdown
options:
- label: "Off"
value: "off"
- label: "Heat"
value: "heat"
- label: "Cool"
value: "cool"
- label: "Heat/Cool"
value: "heat_cool"
- label: "Auto"
value: "auto"
- label: "Dry"
value: "dry"
- label: "Fan only"
value: "fan_only"
cooling:
target:
entity:
domain: climate
heating:
target:
entity:
domain: climate
drying:
target:
entity:
domain: climate
target_temperature_changed:
target:
entity:
domain: climate
fields:
above:
required: false
selector:
number:
mode: box
step: 0.1
below:
required: false
selector:
number:
mode: box
step: 0.1
current_temperature_changed:
target:
entity:
domain: climate
fields:
above:
required: false
selector:
number:
mode: box
step: 0.1
below:
required: false
selector:
number:
mode: box
step: 0.1
target_humidity_changed:
target:
entity:
domain: climate
fields:
above:
required: false
selector:
number:
mode: box
min: 0
max: 100
below:
required: false
selector:
number:
mode: box
min: 0
max: 100
current_humidity_changed:
target:
entity:
domain: climate
fields:
above:
required: false
selector:
number:
mode: box
min: 0
max: 100
below:
required: false
selector:
number:
mode: box
min: 0
max: 100

View File

@@ -115,37 +115,26 @@ GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend(
{vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA}}
)
_BASE_CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(CONF_COGNITO_CLIENT_ID): str,
vol.Optional(CONF_USER_POOL_ID): str,
vol.Optional(CONF_REGION): str,
vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_API_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Any(
_BASE_CONFIG_SCHEMA.extend(
{
vol.Required(CONF_MODE): vol.In([MODE_DEV]),
vol.Required(CONF_API_SERVER): str,
}
),
_BASE_CONFIG_SCHEMA.extend(
{
vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In([MODE_PROD]),
}
),
DOMAIN: vol.Schema(
{
vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In(
[MODE_DEV, MODE_PROD]
),
vol.Optional(CONF_COGNITO_CLIENT_ID): str,
vol.Optional(CONF_USER_POOL_ID): str,
vol.Optional(CONF_REGION): str,
vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_API_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,
}
)
},
extra=vol.ALLOW_EXTRA,

View File

@@ -71,11 +71,8 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
services = await account_link.async_fetch_available_services(
hass.data[DATA_CLOUD]
)
except (aiohttp.ClientError, TimeoutError) as err:
raise config_entry_oauth2_flow.ImplementationUnavailableError(
"Cannot provide OAuth2 implementation for cloud services. "
"Failed to fetch from account link server."
) from err
except (aiohttp.ClientError, TimeoutError):
return []
hass.data[DATA_SERVICES] = services

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.5.1"],
"requirements": ["hass-nabucasa==1.4.0"],
"single_config_entry": true
}

View File

@@ -14,9 +14,8 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ObjectClassType
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
from .utils import new_device_listener
from .utils import DeviceType, new_device_listener
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -31,7 +30,7 @@ async def async_setup_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)

View File

@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reconfiguration of the device."""
reconfigure_entry = self._get_reconfigure_entry()
if not user_input:
return self.async_show_form(
step_id="reconfigure", data_schema=STEP_RECONFIGURE
)
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
errors: dict[str, str] = {}
if user_input is not None:
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
try:
data_to_validate = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
}
await validate_input(self.hass, data_to_validate)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
data_updates = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
}
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates=data_updates
)
schema = vol.Schema(
{
vol.Required(
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
): cv.string,
vol.Required(
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
): cv.port,
vol.Optional(CONF_PIN): cv.string,
}
)
try:
await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates={CONF_HOST: updated_host}
)
return self.async_show_form(
step_id="reconfigure",
data_schema=schema,
data_schema=STEP_RECONFIGURE,
errors=errors,
)

View File

@@ -2,20 +2,10 @@
import logging
from aiocomelit.api import (
ComelitSerialBridgeObject,
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.const import BRIDGE, VEDO
_LOGGER = logging.getLogger(__package__)
ObjectClassType = (
ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
)
DOMAIN = "comelit"
DEFAULT_PORT = 80
DEVICE_TYPE_LIST = [BRIDGE, VEDO]

View File

@@ -10,6 +10,8 @@ from aiocomelit.api import (
ComeliteSerialBridgeApi,
ComelitSerialBridgeObject,
ComelitVedoApi,
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.const import (
BRIDGE,
@@ -30,7 +32,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL, ObjectClassType
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL
type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
@@ -75,7 +77,9 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
def platform_device_info(
self,
object_class: ObjectClassType,
object_class: ComelitVedoZoneObject
| ComelitVedoAreaObject
| ComelitSerialBridgeObject,
object_type: str,
) -> dr.DeviceInfo:
"""Set platform device info."""

View File

@@ -12,10 +12,9 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from .const import ObjectClassType
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call, new_device_listener
from .utils import DeviceType, bridge_api_call, new_device_listener
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -30,7 +29,7 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitCoverEntity(coordinator, device, config_entry.entry_id)

View File

@@ -10,10 +10,9 @@ from homeassistant.components.light import ColorMode, LightEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ObjectClassType
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call, new_device_listener
from .utils import DeviceType, bridge_api_call, new_device_listener
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -28,7 +27,7 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitLightEntity(coordinator, device, config_entry.entry_id)

View File

@@ -18,10 +18,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ObjectClassType
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
from .entity import ComelitBridgeBaseEntity
from .utils import new_device_listener
from .utils import DeviceType, new_device_listener
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -67,7 +66,7 @@ async def async_setup_bridge_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitBridgeSensorEntity(
@@ -94,7 +93,7 @@ async def async_setup_vedo_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoSensorEntity(

View File

@@ -11,10 +11,9 @@ from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ObjectClassType
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call, new_device_listener
from .utils import DeviceType, bridge_api_call, new_device_listener
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -29,7 +28,7 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)

View File

@@ -2,9 +2,13 @@
from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import TYPE_CHECKING, Any, Concatenate
from typing import Any, Concatenate
from aiocomelit.api import ComelitSerialBridgeObject
from aiocomelit.api import (
ComelitSerialBridgeObject,
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession, CookieJar
@@ -18,10 +22,12 @@ from homeassistant.helpers import (
entity_registry as er,
)
from .const import _LOGGER, DOMAIN, ObjectClassType
from .const import _LOGGER, DOMAIN
from .coordinator import ComelitBaseCoordinator
from .entity import ComelitBridgeBaseEntity
DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
async def async_client_session(hass: HomeAssistant) -> ClientSession:
"""Return a new aiohttp session."""
@@ -120,7 +126,11 @@ def new_device_listener(
coordinator: ComelitBaseCoordinator,
new_devices_callback: Callable[
[
list[ObjectClassType],
list[
ComelitSerialBridgeObject
| ComelitVedoAreaObject
| ComelitVedoZoneObject
],
str,
],
None,
@@ -132,10 +142,10 @@ def new_device_listener(
def _check_devices() -> None:
"""Check for new devices and call callback with any new monitors."""
if TYPE_CHECKING:
assert coordinator.data
if not coordinator.data:
return
new_devices: list[ObjectClassType] = []
new_devices: list[DeviceType] = []
for _id in coordinator.data[data_type]:
if _id not in (id_list := known_devices.get(data_type, [])):
known_devices.update({data_type: [*id_list, _id]})
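
Net effect of the .utils change: a single DeviceType alias (serial-bridge object, VEDO area, VEDO zone) now types the sensor and switch callbacks above, and a runtime guard replaces the TYPE_CHECKING assert for the case where the coordinator has no data yet. The following is a self-contained sketch of that listener pattern; the dataclasses are stubs for the aiocomelit classes, and the helper is returned as a plain function instead of being wired into coordinator updates.

from collections.abc import Callable
from dataclasses import dataclass, field


@dataclass
class BridgeObject:
    """Stub for ComelitSerialBridgeObject."""

    index: int
    name: str


@dataclass
class VedoAreaObject:
    """Stub for ComelitVedoAreaObject (ComelitVedoZoneObject omitted)."""

    index: int
    name: str


DeviceType = BridgeObject | VedoAreaObject


@dataclass
class FakeCoordinator:
    """Stub coordinator: data maps a device type to {id: device}."""

    data: dict[str, dict[int, DeviceType]] = field(default_factory=dict)


def new_device_listener(
    coordinator: FakeCoordinator,
    new_devices_callback: Callable[[list[DeviceType], str], None],
    data_type: str,
) -> Callable[[], None]:
    """Return a check function that reports devices not seen before."""
    known_devices: dict[str, list[int]] = {}

    def _check_devices() -> None:
        # The diff swaps a TYPE_CHECKING assert for this runtime guard.
        if not coordinator.data:
            return
        new_devices: list[DeviceType] = []
        for _id, device in coordinator.data.get(data_type, {}).items():
            if _id not in (id_list := known_devices.get(data_type, [])):
                known_devices[data_type] = [*id_list, _id]
                new_devices.append(device)
        if new_devices:
            new_devices_callback(new_devices, data_type)

    return _check_devices


# First run reports the new device, a second identical run stays silent.
coordinator = FakeCoordinator(data={"light": {1: BridgeObject(1, "Kitchen")}})
check = new_device_listener(
    coordinator,
    lambda devices, dev_type: print(dev_type, [d.name for d in devices]),
    "light",
)
check()  # -> light ['Kitchen']
check()  # -> nothing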

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Any, Literal
from typing import Literal
from hassil.recognize import RecognizeResult
import voluptuous as vol
@@ -21,7 +21,6 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
@@ -53,8 +52,6 @@ from .const import (
DATA_COMPONENT,
DOMAIN,
HOME_ASSISTANT_AGENT,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
SERVICE_PROCESS,
SERVICE_RELOAD,
ConversationEntityFeature,
@@ -269,13 +266,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
manager = get_agent_manager(hass)
hass_config_path = hass.config.path()
config_intents = _get_config_intents(config, hass_config_path)
manager.update_config_intents(config_intents)
await async_setup_default_agent(hass, entity_component)
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config_intents=agent_config.get("intents", {})
)
async def handle_process(service: ServiceCall) -> ServiceResponse:
"""Parse text into commands."""
@@ -300,16 +294,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def handle_reload(service: ServiceCall) -> None:
"""Reload intents."""
language = service.data.get(ATTR_LANGUAGE)
if language is None:
conf = await async_integration_yaml_config(hass, DOMAIN)
if conf is not None:
config_intents = _get_config_intents(conf, hass_config_path)
manager.update_config_intents(config_intents)
agent = manager.default_agent
agent = get_agent_manager(hass).default_agent
if agent is not None:
await agent.async_reload(language=language)
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
hass.services.async_register(
DOMAIN,
@@ -326,27 +313,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
"""Return config intents."""
intents = config.get(DOMAIN, {}).get("intents", {})
return {
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in intents.items()
}
}
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
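
With this change the conversation setup no longer expands the YAML intents into sentence and metadata records or routes them through the agent manager; it hands the raw mapping from the conversation: block straight to async_setup_default_agent, and the reload service simply delegates to the default agent. A minimal stand-in for that flow (class and function bodies are simplified; only the shape of the calls matches the diff):

import asyncio
from typing import Any


class FakeDefaultAgent:
    """Stand-in for DefaultAgent: config intents arrive at construction."""

    def __init__(self, config_intents: dict[str, Any]) -> None:
        self._config_intents = config_intents

    async def async_reload(self, language: str | None = None) -> None:
        print(f"reloading intents for language={language!r}")


async def async_setup_default_agent(
    config_intents: dict[str, Any],
) -> FakeDefaultAgent:
    """Stand-in for the real helper, which also registers the entity."""
    return FakeDefaultAgent(config_intents)


async def async_setup(config: dict[str, Any]) -> None:
    # Mirrors the diff: the raw YAML mapping is passed straight through.
    agent_config = config.get("conversation", {})
    agent = await async_setup_default_agent(
        config_intents=agent_config.get("intents", {})
    )
    # The slimmed reload handler just delegates to the agent.
    await agent.async_reload(language=None)


# Equivalent of a configuration.yaml entry:
#   conversation:
#     intents:
#       TurnOnKitchenLight:
#         - "turn on the kitchen light"
asyncio.run(
    async_setup(
        {"conversation": {"intents": {"TurnOnKitchenLight": ["turn on the kitchen light"]}}}
    )
)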

View File

@@ -147,7 +147,6 @@ class AgentManager:
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self.default_agent: DefaultAgent | None = None
self.config_intents: dict[str, Any] = {}
self.triggers_details: list[TriggerDetails] = []
@callback
@@ -200,16 +199,9 @@ class AgentManager:
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
"""Set up the default agent."""
agent.update_config_intents(self.config_intents)
agent.update_triggers(self.triggers_details)
self.default_agent = agent
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self.config_intents = intents
if self.default_agent is not None:
self.default_agent.update_config_intents(intents)
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
"""Register a trigger."""
self.triggers_details.append(trigger_details)
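
After this diff the agent manager keeps only trigger bookkeeping for the default agent; the config-intents cache and its forwarding are gone. A compact stand-in for the surviving responsibility, with synchronous stubs in place of DefaultAgent, TriggerDetails, and the real async setup method:

class FakeAgent:
    """Stub for DefaultAgent's trigger-facing surface."""

    def __init__(self) -> None:
        self.triggers: list[str] = []

    def update_triggers(self, triggers: list[str]) -> None:
        self.triggers = list(triggers)


class FakeAgentManager:
    """Stub for AgentManager after the diff: no config-intents state."""

    def __init__(self) -> None:
        self.default_agent: FakeAgent | None = None
        self.triggers_details: list[str] = []

    def register_trigger(self, trigger: str) -> None:
        # The real method also returns an unregister callback (not shown).
        self.triggers_details.append(trigger)

    def async_setup_default_agent(self, agent: FakeAgent) -> None:
        # Only triggers are pushed now; update_config_intents() is gone.
        agent.update_triggers(self.triggers_details)
        self.default_agent = agent


manager = FakeAgentManager()
manager.register_trigger("good morning")
manager.async_setup_default_agent(FakeAgent())
print(manager.default_agent.triggers)  # -> ['good morning']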

View File

@@ -30,7 +30,3 @@ class ConversationEntityFeature(IntFlag):
"""Supported features of the conversation entity."""
CONTROL = 1
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"

View File

@@ -77,12 +77,7 @@ from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import (
DOMAIN,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
ConversationEntityFeature,
)
from .const import DOMAIN, ConversationEntityFeature
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -96,6 +91,8 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -205,9 +202,10 @@ class IntentCache:
async def async_setup_default_agent(
hass: HomeAssistant,
entity_component: EntityComponent[ConversationEntity],
config_intents: dict[str, Any],
) -> None:
"""Set up entity registry listener for the default agent."""
agent = DefaultAgent(hass)
agent = DefaultAgent(hass, config_intents)
await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent)
@@ -232,14 +230,14 @@ class DefaultAgent(ConversationEntity):
_attr_name = "Home Assistant"
_attr_supported_features = ConversationEntityFeature.CONTROL
def __init__(self, hass: HomeAssistant) -> None:
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
self._load_intents_lock = asyncio.Lock()
# Intents from common conversation config
self._config_intents: dict[str, Any] = {}
# intent -> [sentences]
self._config_intents: dict[str, Any] = config_intents
# Sentences that will trigger a callback (skipping intent recognition)
self._triggers_details: list[TriggerDetails] = []
@@ -770,16 +768,7 @@ class DefaultAgent(ConversationEntity):
if lang_intents.fuzzy_matcher is None:
return None
context_area: str | None = None
satellite_area, _ = self._get_satellite_area_and_device(
user_input.satellite_id, user_input.device_id
)
if satellite_area:
context_area = satellite_area.name
fuzzy_result = lang_intents.fuzzy_matcher.match(
user_input.text, context_area=context_area
)
fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
if fuzzy_result is None:
return None
@@ -1037,14 +1026,6 @@ class DefaultAgent(ConversationEntity):
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
@callback
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self._config_intents = intents
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
async def async_prepare(self, language: str | None = None) -> None:
"""Load intents for a language."""
if language is None:
@@ -1169,10 +1150,33 @@ class DefaultAgent(ConversationEntity):
custom_sentences_path,
)
merge_dict(
intents_dict,
self._config_intents,
)
# Load sentences from HA config for default language only
if self._config_intents and (
self.hass.config.language in (language, language_variant)
):
hass_config_path = self.hass.config.path()
merge_dict(
intents_dict,
{
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in self._config_intents.items()
}
},
)
_LOGGER.debug(
"Loaded intents from configuration.yaml",
)
if not intents_dict:
return None
@@ -1236,14 +1240,15 @@ class DefaultAgent(ConversationEntity):
intent_slot_list_names=self._fuzzy_config.slot_list_names,
slot_combinations={
intent_name: {
combo_key: SlotCombinationInfo(
context_area=combo_info.context_area,
name_domains=(
set(combo_info.name_domains)
if combo_info.name_domains
else None
),
)
combo_key: [
SlotCombinationInfo(
name_domains=(
set(combo_info.name_domains)
if combo_info.name_domains
else None
)
)
]
for combo_key, combo_info in intent_combos.items()
}
for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
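
Taken together, the default-agent hunks move the configuration.yaml intents into the agent at construction time and attach the custom-sentence metadata while language intents are being loaded, but only when the instance language matches the one being prepared. The snippet below rebuilds just that merge step outside Home Assistant; merge_dict is replaced by a plain update, and "/config" is a placeholder for hass.config.path().

METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"

config_intents = {"TurnOnKitchenLight": ["turn on the kitchen light"]}
hass_config_path = "/config"  # placeholder for hass.config.path()
language, language_variant, hass_language = "en", "en-US", "en"

intents_dict: dict = {"intents": {}}

# Mirrors the new guard: merge only for the Home Assistant config language.
if config_intents and hass_language in (language, language_variant):
    intents_dict["intents"].update(
        {
            intent_name: {
                "data": [
                    {
                        "sentences": sentences,
                        "metadata": {
                            METADATA_CUSTOM_SENTENCE: True,
                            METADATA_CUSTOM_FILE: hass_config_path,
                        },
                    }
                ]
            }
            for intent_name, sentences in config_intents.items()
        }
    )

print(intents_dict["intents"]["TurnOnKitchenLight"]["data"][0]["metadata"])

The final fuzzy-matching hunk is a separate interface adjustment: each slot combination now maps to a list of SlotCombinationInfo entries, and the context_area handling (including the satellite-area lookup before match()) is dropped.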

Some files were not shown because too many files have changed in this diff.