Mirror of https://github.com/home-assistant/core.git (synced 2025-11-14 21:40:16 +00:00)

Compare commits: epenet-202 ... cursor/add
3 commits: b2fe77b7f5, d984e4398e, 75bd1a0310

.github/workflows/builder.yml (vendored, 4 changed lines)

@@ -88,10 +88,6 @@ jobs:
  fail-fast: false
  matrix:
  arch: ${{ fromJson(needs.init.outputs.architectures) }}
- exclude:
- - arch: armv7
- - arch: armhf
- - arch: i386
  steps:
  - name: Checkout the repository
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

.github/workflows/ci.yaml (vendored, 4 changed lines)

@@ -37,7 +37,7 @@ on:
  type: boolean

  env:
- CACHE_VERSION: 2
+ CACHE_VERSION: 1
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.12"

@@ -622,7 +622,7 @@ jobs:
  steps:
  - *checkout
  - name: Dependency review
- uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
+ uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
  with:
  license-check: false # We use our own license audit checks

.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

  - name: Initialize CodeQL
- uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
+ uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
  with:
  languages: python

  - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
+ uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
  with:
  category: "/language:python"

@@ -362,7 +362,6 @@ homeassistant.components.myuplink.*
  homeassistant.components.nam.*
  homeassistant.components.nanoleaf.*
  homeassistant.components.nasweb.*
- homeassistant.components.neato.*
  homeassistant.components.nest.*
  homeassistant.components.netatmo.*
  homeassistant.components.network.*

CODEOWNERS (generated, 12 changed lines)

@@ -1017,8 +1017,8 @@ build.json @home-assistant/supervisor
  /homeassistant/components/msteams/ @peroyvind
  /homeassistant/components/mullvad/ @meichthys
  /tests/components/mullvad/ @meichthys
- /homeassistant/components/music_assistant/ @music-assistant @arturpragacz
- /tests/components/music_assistant/ @music-assistant @arturpragacz
+ /homeassistant/components/music_assistant/ @music-assistant
+ /tests/components/music_assistant/ @music-assistant
  /homeassistant/components/mutesync/ @currentoor
  /tests/components/mutesync/ @currentoor
  /homeassistant/components/my/ @home-assistant/core

@@ -1539,8 +1539,8 @@ build.json @home-assistant/supervisor
  /tests/components/suez_water/ @ooii @jb101010-2
  /homeassistant/components/sun/ @home-assistant/core
  /tests/components/sun/ @home-assistant/core
- /homeassistant/components/sunricher_dali/ @niracler
- /tests/components/sunricher_dali/ @niracler
+ /homeassistant/components/sunricher_dali_center/ @niracler
+ /tests/components/sunricher_dali_center/ @niracler
  /homeassistant/components/supla/ @mwegrzynek
  /homeassistant/components/surepetcare/ @benleb @danielhiversen
  /tests/components/surepetcare/ @benleb @danielhiversen

@@ -1817,8 +1817,8 @@ build.json @home-assistant/supervisor
  /tests/components/ws66i/ @ssaenger
  /homeassistant/components/wyoming/ @synesthesiam
  /tests/components/wyoming/ @synesthesiam
- /homeassistant/components/xbox/ @hunterjm @tr4nt0r
- /tests/components/xbox/ @hunterjm @tr4nt0r
+ /homeassistant/components/xbox/ @hunterjm
+ /tests/components/xbox/ @hunterjm
  /homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
  /tests/components/xiaomi_aqara/ @danielhiversen @syssi
  /homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79

@@ -1,10 +1,7 @@
  image: ghcr.io/home-assistant/{arch}-homeassistant
  build_from:
- aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
- armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
- armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
- amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
- i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
+ aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
+ amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
  cosign:
  base_identity: https://github.com/home-assistant/docker/.*
  identity: https://github.com/home-assistant/core/.*

@@ -6,6 +6,7 @@ Sending HOTP through notify service
  from __future__ import annotations

  import asyncio
+ from collections import OrderedDict
  import logging
  from typing import Any, cast

@@ -303,14 +304,13 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
  if not self._available_notify_services:
  return self.async_abort(reason="no_available_service")

- schema = vol.Schema(
- {
- vol.Required("notify_service"): vol.In(self._available_notify_services),
- vol.Optional("target"): str,
- }
- )
+ schema: dict[str, Any] = OrderedDict()
+ schema["notify_service"] = vol.In(self._available_notify_services)
+ schema["target"] = vol.Optional(str)

- return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
+ return self.async_show_form(
+ step_id="init", data_schema=vol.Schema(schema), errors=errors
+ )

  async def async_step_setup(
  self, user_input: dict[str, str] | None = None
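
Both sides of the second hunk build the same form: the removed code declares the voluptuous markers inline, while the added code assembles a mapping first and wraps it in `vol.Schema` when showing the form. A minimal standalone sketch of the two styles (plain `voluptuous`, no Home Assistant imports; the service names are made up, and this sketch keeps the `vol.Required`/`vol.Optional` markers in both variants, whereas the hunk's OrderedDict variant stores plain string keys):

```python
# Sketch of the two schema-building styles seen in the hunk above.
from collections import OrderedDict

import voluptuous as vol

SERVICES = ["notify.mobile_app", "notify.telegram"]  # hypothetical services

# Style 1: declare the whole mapping inline.
inline_schema = vol.Schema(
    {
        vol.Required("notify_service"): vol.In(SERVICES),
        vol.Optional("target"): str,
    }
)

# Style 2: build the mapping incrementally, wrap it at the end.
fields: OrderedDict = OrderedDict()
fields[vol.Required("notify_service")] = vol.In(SERVICES)
fields[vol.Optional("target")] = str
assembled_schema = vol.Schema(fields)

print(inline_schema({"notify_service": "notify.telegram"}))
print(assembled_schema({"notify_service": "notify.mobile_app", "target": "me"}))
```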

@@ -179,18 +179,12 @@ class Data:
  user_hash = base64.b64decode(found["password"])

  # bcrypt.checkpw is timing-safe
- # With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
- # Previously the password was silently truncated.
- # https://github.com/pyca/bcrypt/pull/1000
- if not bcrypt.checkpw(password.encode()[:72], user_hash):
+ if not bcrypt.checkpw(password.encode(), user_hash):
  raise InvalidAuth

  def hash_password(self, password: str, for_storage: bool = False) -> bytes:
  """Encode a password."""
- # With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
- # Previously the password was silently truncated.
- # https://github.com/pyca/bcrypt/pull/1000
- hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
+ hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))

  if for_storage:
  hashed = base64.b64encode(hashed)
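
The removed comments explain the `[:72]` slices: bcrypt only uses the first 72 bytes of a password, and bcrypt 5.0 started raising `ValueError` for longer input instead of truncating silently (see the linked pyca/bcrypt pull request in the hunk). A small standalone sketch of that behavior, assuming the `bcrypt` package is installed:

```python
# Standalone sketch of the 72-byte limit the removed comments describe.
import bcrypt

password = ("x" * 100).encode()  # 100 bytes, beyond bcrypt's 72-byte limit

# Explicit truncation, as on the removed side of the hunk: hashing and
# verification work regardless of the installed bcrypt version.
hashed = bcrypt.hashpw(password[:72], bcrypt.gensalt(rounds=12))
assert bcrypt.checkpw(password[:72], hashed)

# Without the slice, bcrypt >= 5.0 rejects the over-long password outright,
# while older releases silently truncated it to 72 bytes.
try:
    bcrypt.checkpw(password, hashed)
except ValueError as err:
    print(f"bcrypt rejected the over-long password: {err}")
```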

@@ -1,5 +1,11 @@
  {
  "domain": "yale",
- "name": "Yale (non-US/Canada)",
- "integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
+ "name": "Yale",
+ "integrations": [
+ "august",
+ "yale_smart_alarm",
+ "yalexs_ble",
+ "yale_home",
+ "yale"
+ ]
  }

@@ -1,5 +0,0 @@
- {
- "domain": "yale_august",
- "name": "Yale August (US/Canada)",
- "integrations": ["august", "august_ble"]
- }

@@ -17,11 +17,6 @@ from homeassistant.const import (
  CONF_UNIQUE_ID,
  )
  from homeassistant.helpers.aiohttp_client import async_get_clientsession
- from homeassistant.helpers.selector import (
- TextSelector,
- TextSelectorConfig,
- TextSelectorType,
- )

  from .const import (
  ACCOUNT_ID,

@@ -71,15 +66,7 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
  ) -> ConfigFlowResult:
  """Handle the local step."""
  data_schema = vol.Schema(
- {
- vol.Required(WIFI_SSID): str,
- vol.Required(WIFI_PSWD): TextSelector(
- TextSelectorConfig(
- type=TextSelectorType.PASSWORD,
- autocomplete="current-password",
- ),
- ),
- }
+ {vol.Required(WIFI_SSID): str, vol.Required(WIFI_PSWD): str}
  )
  if user_input is None:
  return self.async_show_form(
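
The selector block removed from the local step is what renders the Wi-Fi password as a masked password field; the replacement schema falls back to a plain text field. A sketch of just the two schema variants, reusing the imports shown in the hunks (the `WIFI_SSID`/`WIFI_PSWD` values are stand-ins, not copied from the integration's const module):

```python
# Sketch comparing the two schema variants from the hunk above.
import voluptuous as vol

from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

WIFI_SSID = "wifi_ssid"  # stand-in constant names/values for the sketch
WIFI_PSWD = "wifi_pswd"

# Plain variant (added side): both fields render as ordinary text inputs.
plain_schema = vol.Schema({vol.Required(WIFI_SSID): str, vol.Required(WIFI_PSWD): str})

# Selector variant (removed side): the password is masked in the frontend and
# browsers can offer autofill via the autocomplete hint.
selector_schema = vol.Schema(
    {
        vol.Required(WIFI_SSID): str,
        vol.Required(WIFI_PSWD): TextSelector(
            TextSelectorConfig(
                type=TextSelectorType.PASSWORD,
                autocomplete="current-password",
            )
        ),
    }
)
```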

@@ -2,16 +2,14 @@

  from __future__ import annotations

- from dataclasses import dataclass
  from typing import cast

  from homeassistant.components.sensor import (
  SensorDeviceClass,
  SensorEntity,
- SensorEntityDescription,
  SensorStateClass,
  )
- from homeassistant.const import UnitOfEnergy, UnitOfTemperature
+ from homeassistant.const import UnitOfEnergy
  from homeassistant.core import HomeAssistant
  from homeassistant.helpers.device_registry import DeviceInfo
  from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -22,74 +20,44 @@ from .const import CONNECTION_TYPE, DOMAIN, LOCAL
  from .coordinator import AdaxCloudCoordinator


- @dataclass(kw_only=True, frozen=True)
- class AdaxSensorDescription(SensorEntityDescription):
- """Describes Adax sensor entity."""
-
- data_key: str
-
-
- SENSORS: tuple[AdaxSensorDescription, ...] = (
- AdaxSensorDescription(
- key="temperature",
- data_key="temperature",
- device_class=SensorDeviceClass.TEMPERATURE,
- native_unit_of_measurement=UnitOfTemperature.CELSIUS,
- state_class=SensorStateClass.MEASUREMENT,
- suggested_display_precision=1,
- ),
- AdaxSensorDescription(
- key="energy",
- data_key="energyWh",
- device_class=SensorDeviceClass.ENERGY,
- native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
- suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
- state_class=SensorStateClass.TOTAL_INCREASING,
- suggested_display_precision=3,
- ),
- )
-
-
  async def async_setup_entry(
  hass: HomeAssistant,
  entry: AdaxConfigEntry,
  async_add_entities: AddConfigEntryEntitiesCallback,
  ) -> None:
- """Set up the Adax sensors with config flow."""
+ """Set up the Adax energy sensors with config flow."""
  if entry.data.get(CONNECTION_TYPE) != LOCAL:
  cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

  # Create individual energy sensors for each device
  async_add_entities(
- [
- AdaxSensor(cloud_coordinator, entity_description, device_id)
- for device_id in cloud_coordinator.data
- for entity_description in SENSORS
- ]
+ AdaxEnergySensor(cloud_coordinator, device_id)
+ for device_id in cloud_coordinator.data
  )


- class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
- """Representation of an Adax sensor."""
+ class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
+ """Representation of an Adax energy sensor."""

- entity_description: AdaxSensorDescription
  _attr_has_entity_name = True
+ _attr_translation_key = "energy"
+ _attr_device_class = SensorDeviceClass.ENERGY
+ _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
+ _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
+ _attr_state_class = SensorStateClass.TOTAL_INCREASING
+ _attr_suggested_display_precision = 3

  def __init__(
  self,
  coordinator: AdaxCloudCoordinator,
- entity_description: AdaxSensorDescription,
  device_id: str,
  ) -> None:
- """Initialize the sensor."""
+ """Initialize the energy sensor."""
  super().__init__(coordinator)
- self.entity_description = entity_description
  self._device_id = device_id
  room = coordinator.data[device_id]

- self._attr_unique_id = (
- f"{room['homeId']}_{device_id}_{self.entity_description.key}"
- )
+ self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
  self._attr_device_info = DeviceInfo(
  identifiers={(DOMAIN, device_id)},
  name=room["name"],

@@ -100,14 +68,10 @@ class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
  def available(self) -> bool:
  """Return True if entity is available."""
  return (
- super().available
- and self.entity_description.data_key
- in self.coordinator.data[self._device_id]
+ super().available and "energyWh" in self.coordinator.data[self._device_id]
  )

  @property
- def native_value(self) -> int | float | None:
+ def native_value(self) -> int:
  """Return the native value of the sensor."""
- return self.coordinator.data[self._device_id].get(
- self.entity_description.data_key
- )
+ return int(self.coordinator.data[self._device_id]["energyWh"])
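
The removed side of the sensor hunks is a compact instance of the description-driven entity pattern: a frozen `SensorEntityDescription` subclass carries an extra `data_key`, and one tuple of descriptions drives both entity creation and value lookup. A trimmed sketch of that pattern, keeping only the energy description (everything here is taken from the removed lines):

```python
# Trimmed sketch of the description-driven pattern on the removed side.
from dataclasses import dataclass

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy


@dataclass(kw_only=True, frozen=True)
class AdaxSensorDescription(SensorEntityDescription):
    """Describes Adax sensor entity."""

    data_key: str


SENSORS: tuple[AdaxSensorDescription, ...] = (
    AdaxSensorDescription(
        key="energy",
        data_key="energyWh",
        device_class=SensorDeviceClass.ENERGY,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        state_class=SensorStateClass.TOTAL_INCREASING,
        suggested_display_precision=3,
    ),
)

# The entity then reads its value generically, e.g.:
#     self.coordinator.data[self._device_id].get(self.entity_description.data_key)
```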

@@ -30,7 +30,6 @@ generate_data:
  media:
  accept:
  - "*"
- multiple: true
  generate_image:
  fields:
  task_name:

@@ -58,4 +57,3 @@ generate_image:
  media:
  accept:
  - "*"
- multiple: true

@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
  from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
  from homeassistant.const import CONF_ADDRESS

- from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
+ from .const import DOMAIN, MFCT_ID

  _LOGGER = logging.getLogger(__name__)

@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
  self, user_input: dict[str, Any] | None = None
  ) -> ConfigFlowResult:
  """Confirm discovery."""
- assert self._discovered_device is not None
-
  if user_input is not None:
- if self._discovered_device.device.firmware.need_firmware_upgrade:
+ if (
+ self._discovered_device is not None
+ and self._discovered_device.device.firmware.need_firmware_upgrade
+ ):
  return self.async_abort(reason="firmware_upgrade_required")

  return self.async_create_entry(
- title=self.context["title_placeholders"]["name"],
- data={DEVICE_MODEL: self._discovered_device.device.model.value},
+ title=self.context["title_placeholders"]["name"], data={}
  )

  self._set_confirm_only()

@@ -164,10 +164,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):

  self._discovered_device = discovery

- return self.async_create_entry(
- title=discovery.name,
- data={DEVICE_MODEL: discovery.device.model.value},
- )
+ return self.async_create_entry(title=discovery.name, data={})

  current_addresses = self._async_current_ids(include_ignore=False)
  devices: list[BluetoothServiceInfoBleak] = []

@@ -1,16 +1,11 @@
  """Constants for Airthings BLE."""

- from airthings_ble import AirthingsDeviceType
-
  DOMAIN = "airthings_ble"
  MFCT_ID = 820

  VOLUME_BECQUEREL = "Bq/m³"
  VOLUME_PICOCURIE = "pCi/L"

- DEVICE_MODEL = "device_model"
-
  DEFAULT_SCAN_INTERVAL = 300
- DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}

  MAX_RETRIES_AFTER_STARTUP = 5

@@ -16,12 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
  from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
  from homeassistant.util.unit_system import METRIC_SYSTEM

- from .const import (
- DEFAULT_SCAN_INTERVAL,
- DEVICE_MODEL,
- DEVICE_SPECIFIC_SCAN_INTERVAL,
- DOMAIN,
- )
+ from .const import DEFAULT_SCAN_INTERVAL, DOMAIN

  _LOGGER = logging.getLogger(__name__)

@@ -39,18 +34,12 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
  self.airthings = AirthingsBluetoothDeviceData(
  _LOGGER, hass.config.units is METRIC_SYSTEM
  )
-
- device_model = entry.data.get(DEVICE_MODEL)
- interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
- device_model, DEFAULT_SCAN_INTERVAL
- )
-
  super().__init__(
  hass,
  _LOGGER,
  config_entry=entry,
  name=DOMAIN,
- update_interval=timedelta(seconds=interval),
+ update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
  )

  async def _async_setup(self) -> None:

@@ -69,29 +58,11 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
  )
  self.ble_device = ble_device

- if DEVICE_MODEL not in self.config_entry.data:
- _LOGGER.debug("Fetching device info for migration")
- try:
- data = await self.airthings.update_device(self.ble_device)
- except Exception as err:
- raise UpdateFailed(
- f"Unable to fetch data for migration: {err}"
- ) from err
-
- self.hass.config_entries.async_update_entry(
- self.config_entry,
- data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
- )
- self.update_interval = timedelta(
- seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
- data.model.value, DEFAULT_SCAN_INTERVAL
- )
- )
-
  async def _async_update_data(self) -> AirthingsDevice:
  """Get data from Airthings BLE."""
  try:
  data = await self.airthings.update_device(self.ble_device)
  except Exception as err:
  raise UpdateFailed(f"Unable to fetch data: {err}") from err

  return data
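
The removed coordinator and const lines implement a per-model polling interval: a dict keyed by device model with a default fallback. The selection step on its own (pure Python; the model string here is a placeholder, the real table is keyed by `AirthingsDeviceType.CORENTIUM_HOME_2.value`):

```python
# Standalone sketch of the per-model scan interval lookup removed above.
from datetime import timedelta

DEFAULT_SCAN_INTERVAL = 300  # seconds
DEVICE_SPECIFIC_SCAN_INTERVAL = {"Corentium Home 2": 1800}  # placeholder key


def scan_interval_for(device_model: str | None) -> timedelta:
    """Return the polling interval for a model, falling back to the default."""
    seconds = DEVICE_SPECIFIC_SCAN_INTERVAL.get(device_model, DEFAULT_SCAN_INTERVAL)
    return timedelta(seconds=seconds)


print(scan_interval_for(None))                # 0:05:00
print(scan_interval_for("Corentium Home 2"))  # 0:30:00
```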

@@ -58,10 +58,7 @@ from homeassistant.const import (
  from homeassistant.helpers import network
  from homeassistant.util import color as color_util, dt as dt_util
  from homeassistant.util.decorator import Registry
- from homeassistant.util.unit_conversion import (
- TemperatureConverter,
- TemperatureDeltaConverter,
- )
+ from homeassistant.util.unit_conversion import TemperatureConverter

  from .config import AbstractConfig
  from .const import (

@@ -847,7 +844,7 @@ def temperature_from_object(
  temp -= 273.15

  if interval:
- return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
+ return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
  return TemperatureConverter.convert(temp, from_unit, to_unit)
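
The second hunk only swaps which helper converts a temperature interval; both names appear in the diff. The reason intervals need a separate path is plain arithmetic: a difference of temperatures must not pick up the offset between scales.

```python
# Why a temperature delta converts differently from an absolute reading.
def c_to_f_absolute(celsius: float) -> float:
    return celsius * 9 / 5 + 32


def c_to_f_interval(delta_celsius: float) -> float:
    return delta_celsius * 9 / 5  # no +32 offset for a difference


print(c_to_f_absolute(1.0))  # 33.8, the reading "1 °C" in Fahrenheit
print(c_to_f_interval(1.0))  # 1.8, a change of 1 °C in Fahrenheit
```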

@@ -6,8 +6,8 @@ from collections.abc import Callable
  from dataclasses import dataclass
  from typing import Final

- from aioamazondevices.const.metadata import SENSOR_STATE_OFF
- from aioamazondevices.structures import AmazonDevice
+ from aioamazondevices.api import AmazonDevice
+ from aioamazondevices.const import SENSOR_STATE_OFF

  from homeassistant.components.binary_sensor import (
  DOMAIN as BINARY_SENSOR_DOMAIN,

@@ -2,13 +2,12 @@

  from datetime import timedelta

- from aioamazondevices.api import AmazonEchoApi
+ from aioamazondevices.api import AmazonDevice, AmazonEchoApi
  from aioamazondevices.exceptions import (
  CannotAuthenticate,
  CannotConnect,
  CannotRetrieveData,
  )
- from aioamazondevices.structures import AmazonDevice
  from aiohttp import ClientSession

  from homeassistant.config_entries import ConfigEntry

@@ -2,10 +2,9 @@

  from __future__ import annotations

- from dataclasses import asdict
  from typing import Any

- from aioamazondevices.structures import AmazonDevice
+ from aioamazondevices.api import AmazonDevice

  from homeassistant.components.diagnostics import async_redact_data
  from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME

@@ -61,5 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
  "online": device.online,
  "serial number": device.serial_number,
  "software version": device.software_version,
- "sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
+ "sensors": device.sensors,
  }

@@ -1,7 +1,7 @@
  """Defines a base Alexa Devices entity."""

- from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
- from aioamazondevices.structures import AmazonDevice
+ from aioamazondevices.api import AmazonDevice
+ from aioamazondevices.const import SPEAKER_GROUP_MODEL

  from homeassistant.helpers.device_registry import DeviceInfo
  from homeassistant.helpers.entity import EntityDescription

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
- "requirements": ["aioamazondevices==8.0.1"]
+ "requirements": ["aioamazondevices==6.5.5"]
  }

@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
  from dataclasses import dataclass
  from typing import Any, Final

- from aioamazondevices.api import AmazonEchoApi
- from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
- from aioamazondevices.structures import AmazonDevice
+ from aioamazondevices.api import AmazonDevice, AmazonEchoApi
+ from aioamazondevices.const import SPEAKER_GROUP_FAMILY

  from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
  from homeassistant.core import HomeAssistant

@@ -7,12 +7,12 @@ from dataclasses import dataclass
  from datetime import datetime
  from typing import Final

- from aioamazondevices.const.schedules import (
+ from aioamazondevices.api import AmazonDevice
+ from aioamazondevices.const import (
  NOTIFICATION_ALARM,
  NOTIFICATION_REMINDER,
  NOTIFICATION_TIMER,
  )
- from aioamazondevices.structures import AmazonDevice

  from homeassistant.components.sensor import (
  SensorDeviceClass,

@@ -1,6 +1,6 @@
  """Support for services."""

- from aioamazondevices.const.sounds import SOUNDS_LIST
+ from aioamazondevices.sounds import SOUNDS_LIST
  import voluptuous as vol

  from homeassistant.config_entries import ConfigEntryState

@@ -6,7 +6,7 @@ from collections.abc import Callable
  from dataclasses import dataclass
  from typing import TYPE_CHECKING, Any, Final

- from aioamazondevices.structures import AmazonDevice
+ from aioamazondevices.api import AmazonDevice

  from homeassistant.components.switch import (
  DOMAIN as SWITCH_DOMAIN,

@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
  from functools import wraps
  from typing import Any, Concatenate

- from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
+ from aioamazondevices.const import SPEAKER_GROUP_FAMILY
  from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

  from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
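
All of the Alexa Devices hunks above follow the same shape: import paths move between `aioamazondevices` module layouts, matching the requirement change from 8.0.1 to 6.5.5 in the manifest hunk. The integration simply pins one version, but if code ever had to tolerate both layouts, a guarded import is the usual pattern; a hypothetical sketch that uses only the paths shown in these hunks:

```python
# Hypothetical compatibility shim; the import paths come from the hunks above,
# the shim itself is not part of the integration.
try:
    # Layout used with the aioamazondevices==8.0.1 requirement
    from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
    from aioamazondevices.structures import AmazonDevice
except ImportError:
    # Layout used with the aioamazondevices==6.5.5 requirement
    from aioamazondevices.api import AmazonDevice
    from aioamazondevices.const import SPEAKER_GROUP_MODEL
```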

@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv

  from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
  from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
- from .services import async_setup_services
+ from .services import setup_services

  CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


  async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
  """Set up the Amber component."""
- async_setup_services(hass)
+ setup_services(hass)
  return True


@@ -10,7 +10,6 @@ from homeassistant.core import (
  ServiceCall,
  ServiceResponse,
  SupportsResponse,
- callback,
  )
  from homeassistant.exceptions import ServiceValidationError
  from homeassistant.helpers.selector import ConfigEntrySelector

@@ -103,8 +102,7 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
  return results


- @callback
- def async_setup_services(hass: HomeAssistant) -> None:
+ def setup_services(hass: HomeAssistant) -> None:
  """Set up the services for the Amber integration."""

  async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
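
The Amber hunks rename the service-setup helper and drop its `@callback` decorator. A minimal sketch of the decorated shape from the removed side; the domain, service name and registration call are illustrative, not copied from the file:

```python
# Minimal sketch of a callback-style service setup, following the removed side
# of the hunk. The domain, service name and handler body are illustrative.
from homeassistant.core import (
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)

DOMAIN = "amberelectric"  # assumed domain for the sketch


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the Amber integration."""

    async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
        # The real handler resolves the config entry and returns forecast data.
        return {"forecasts": []}

    hass.services.async_register(
        DOMAIN,
        "get_forecasts",
        handle_get_forecasts,
        supports_response=SupportsResponse.ONLY,
    )
```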

@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
  translation_key="daily_rain",
  native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
  device_class=SensorDeviceClass.PRECIPITATION,
- state_class=SensorStateClass.TOTAL_INCREASING,
+ state_class=SensorStateClass.TOTAL,
  suggested_display_precision=2,
  ),
  SensorEntityDescription(

@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
  key=TYPE_LIGHTNING_PER_DAY,
  translation_key="lightning_strikes_per_day",
  native_unit_of_measurement="strikes",
- state_class=SensorStateClass.TOTAL_INCREASING,
+ state_class=SensorStateClass.TOTAL,
  entity_registry_enabled_default=False,
  ),
  SensorEntityDescription(

@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
  translation_key="monthly_rain",
  native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
  device_class=SensorDeviceClass.PRECIPITATION,
- state_class=SensorStateClass.TOTAL_INCREASING,
+ state_class=SensorStateClass.TOTAL,
  suggested_display_precision=2,
  entity_registry_enabled_default=False,
  ),

@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
  translation_key="weekly_rain",
  native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
  device_class=SensorDeviceClass.PRECIPITATION,
- state_class=SensorStateClass.TOTAL_INCREASING,
+ state_class=SensorStateClass.TOTAL,
  suggested_display_precision=2,
  entity_registry_enabled_default=False,
  ),

@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
  translation_key="yearly_rain",
  native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
  device_class=SensorDeviceClass.PRECIPITATION,
- state_class=SensorStateClass.TOTAL_INCREASING,
+ state_class=SensorStateClass.TOTAL,
  suggested_display_precision=2,
  entity_registry_enabled_default=False,
  ),
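
Every ambient_station change above is the same single-field swap of the sensor state class. As a rough summary (hedged, not quoted from the Home Assistant docs): `TOTAL_INCREASING` assumes a monotonically rising counter where any drop is treated as a meter reset, while `TOTAL` allows the value to decrease without implying a reset. Two otherwise identical descriptions for comparison (the `key` is a placeholder):

```python
# Two descriptions differing only in state class, mirroring the hunks above.
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfPrecipitationDepth

daily_rain_total = SensorEntityDescription(
    key="daily_rain",  # placeholder key for the sketch
    native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
    device_class=SensorDeviceClass.PRECIPITATION,
    state_class=SensorStateClass.TOTAL,
)

daily_rain_increasing = SensorEntityDescription(
    key="daily_rain",
    native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
    device_class=SensorDeviceClass.PRECIPITATION,
    state_class=SensorStateClass.TOTAL_INCREASING,
)
```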

@@ -39,11 +39,11 @@ from .const import (
  CONF_TURN_OFF_COMMAND,
  CONF_TURN_ON_COMMAND,
  DEFAULT_ADB_SERVER_PORT,
+ DEFAULT_DEVICE_CLASS,
  DEFAULT_EXCLUDE_UNNAMED_APPS,
  DEFAULT_GET_SOURCES,
  DEFAULT_PORT,
  DEFAULT_SCREENCAP_INTERVAL,
- DEVICE_AUTO,
  DEVICE_CLASSES,
  DOMAIN,
  PROP_ETHMAC,

@@ -89,14 +89,8 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
  data_schema = vol.Schema(
  {
  vol.Required(CONF_HOST, default=host): str,
- vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
- SelectSelectorConfig(
- options=[
- SelectOptionDict(value=k, label=v)
- for k, v in DEVICE_CLASSES.items()
- ],
- translation_key="device_class",
- )
+ vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
+ DEVICE_CLASSES
  ),
  vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
  },

@@ -15,19 +15,15 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
  CONF_TURN_ON_COMMAND = "turn_on_command"

  DEFAULT_ADB_SERVER_PORT = 5037
+ DEFAULT_DEVICE_CLASS = "auto"
  DEFAULT_EXCLUDE_UNNAMED_APPS = False
  DEFAULT_GET_SOURCES = True
  DEFAULT_PORT = 5555
  DEFAULT_SCREENCAP_INTERVAL = 5

- DEVICE_AUTO = "auto"
  DEVICE_ANDROIDTV = "androidtv"
  DEVICE_FIRETV = "firetv"
- DEVICE_CLASSES = {
- DEVICE_AUTO: "auto",
- DEVICE_ANDROIDTV: "Android TV",
- DEVICE_FIRETV: "Fire TV",
- }
+ DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]

  PROP_ETHMAC = "ethmac"
  PROP_SERIALNO = "serialno"

@@ -65,13 +65,6 @@
  }
  }
  },
- "selector": {
- "device_class": {
- "options": {
- "auto": "Auto-detect device type"
- }
- }
- },
  "services": {
  "adb_command": {
  "description": "Sends an ADB command to an Android / Fire TV device.",
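
The removed config-flow schema, const mapping and strings entry work together: a `SelectSelector` gets value/label pairs built from the `DEVICE_CLASSES` dict plus a `translation_key` so the frontend can localize the labels, whereas the added side uses a bare `vol.In` over a list. A sketch of just that field, with the constants inlined from the const.py hunk:

```python
# Sketch of the two declarations of the device-class field from the hunks above.
import voluptuous as vol

from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
)

CONF_DEVICE_CLASS = "device_class"  # stand-in for homeassistant.const.CONF_DEVICE_CLASS
DEVICE_AUTO = "auto"
DEVICE_CLASSES = {DEVICE_AUTO: "auto", "androidtv": "Android TV", "firetv": "Fire TV"}

# Removed side: translated dropdown options built from the mapping.
selector_field = {
    vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
        SelectSelectorConfig(
            options=[SelectOptionDict(value=k, label=v) for k, v in DEVICE_CLASSES.items()],
            translation_key="device_class",
        )
    )
}

# Added side: a plain vol.In over the list form of DEVICE_CLASSES.
plain_field = {
    vol.Required(CONF_DEVICE_CLASS, default="auto"): vol.In(["auto", "androidtv", "firetv"])
}
```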

@@ -25,7 +25,7 @@ from .const import (
  RECOMMENDED_CHAT_MODEL,
  )

- PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
+ PLATFORMS = (Platform.CONVERSATION,)
  CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

  type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

@@ -1,80 +0,0 @@
- """AI Task integration for Anthropic."""
-
- from __future__ import annotations
-
- from json import JSONDecodeError
- import logging
-
- from homeassistant.components import ai_task, conversation
- from homeassistant.config_entries import ConfigEntry
- from homeassistant.core import HomeAssistant
- from homeassistant.exceptions import HomeAssistantError
- from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
- from homeassistant.util.json import json_loads
-
- from .entity import AnthropicBaseLLMEntity
-
- _LOGGER = logging.getLogger(__name__)
-
-
- async def async_setup_entry(
- hass: HomeAssistant,
- config_entry: ConfigEntry,
- async_add_entities: AddConfigEntryEntitiesCallback,
- ) -> None:
- """Set up AI Task entities."""
- for subentry in config_entry.subentries.values():
- if subentry.subentry_type != "ai_task_data":
- continue
-
- async_add_entities(
- [AnthropicTaskEntity(config_entry, subentry)],
- config_subentry_id=subentry.subentry_id,
- )
-
-
- class AnthropicTaskEntity(
- ai_task.AITaskEntity,
- AnthropicBaseLLMEntity,
- ):
- """Anthropic AI Task entity."""
-
- _attr_supported_features = (
- ai_task.AITaskEntityFeature.GENERATE_DATA
- | ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
- )
-
- async def _async_generate_data(
- self,
- task: ai_task.GenDataTask,
- chat_log: conversation.ChatLog,
- ) -> ai_task.GenDataTaskResult:
- """Handle a generate data task."""
- await self._async_handle_chat_log(chat_log, task.name, task.structure)
-
- if not isinstance(chat_log.content[-1], conversation.AssistantContent):
- raise HomeAssistantError(
- "Last content in chat log is not an AssistantContent"
- )
-
- text = chat_log.content[-1].content or ""
-
- if not task.structure:
- return ai_task.GenDataTaskResult(
- conversation_id=chat_log.conversation_id,
- data=text,
- )
- try:
- data = json_loads(text)
- except JSONDecodeError as err:
- _LOGGER.error(
- "Failed to parse JSON response: %s. Response: %s",
- err,
- text,
- )
- raise HomeAssistantError("Error with Claude structured response") from err
-
- return ai_task.GenDataTaskResult(
- conversation_id=chat_log.conversation_id,
- data=data,
- )

@@ -2,10 +2,11 @@

  from __future__ import annotations

+ from collections.abc import Mapping
  from functools import partial
  import json
  import logging
- from typing import Any
+ from typing import Any, cast

  import anthropic
  import voluptuous as vol

@@ -37,7 +38,6 @@ from homeassistant.helpers.selector import (
  SelectSelectorConfig,
  TemplateSelector,
  )
- from homeassistant.helpers.typing import VolDictType

  from .const import (
  CONF_CHAT_MODEL,

@@ -53,10 +53,8 @@ from .const import (
  CONF_WEB_SEARCH_REGION,
  CONF_WEB_SEARCH_TIMEZONE,
  CONF_WEB_SEARCH_USER_LOCATION,
- DEFAULT_AI_TASK_NAME,
  DEFAULT_CONVERSATION_NAME,
  DOMAIN,
- NON_THINKING_MODELS,
  RECOMMENDED_CHAT_MODEL,
  RECOMMENDED_MAX_TOKENS,
  RECOMMENDED_TEMPERATURE,

@@ -75,16 +73,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
  }
  )

- RECOMMENDED_CONVERSATION_OPTIONS = {
+ RECOMMENDED_OPTIONS = {
  CONF_RECOMMENDED: True,
  CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
  CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
  }
-
- RECOMMENDED_AI_TASK_OPTIONS = {
- CONF_RECOMMENDED: True,
- }


  async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
  """Validate the user input allows us to connect.

@@ -107,7 +101,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
  self, user_input: dict[str, Any] | None = None
  ) -> ConfigFlowResult:
  """Handle the initial step."""
- errors: dict[str, str] = {}
+ errors = {}

  if user_input is not None:
  self._async_abort_entries_match(user_input)

@@ -135,16 +129,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
  subentries=[
  {
  "subentry_type": "conversation",
- "data": RECOMMENDED_CONVERSATION_OPTIONS,
+ "data": RECOMMENDED_OPTIONS,
  "title": DEFAULT_CONVERSATION_NAME,
  "unique_id": None,
- },
- {
- "subentry_type": "ai_task_data",
- "data": RECOMMENDED_AI_TASK_OPTIONS,
- "title": DEFAULT_AI_TASK_NAME,
- "unique_id": None,
- },
+ }
  ],
  )

@@ -158,240 +146,101 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
  cls, config_entry: ConfigEntry
  ) -> dict[str, type[ConfigSubentryFlow]]:
  """Return subentries supported by this integration."""
- return {
- "conversation": ConversationSubentryFlowHandler,
- "ai_task_data": ConversationSubentryFlowHandler,
- }
+ return {"conversation": ConversationSubentryFlowHandler}


  class ConversationSubentryFlowHandler(ConfigSubentryFlow):
  """Flow for managing conversation subentries."""

- options: dict[str, Any]
+ last_rendered_recommended = False

  @property
  def _is_new(self) -> bool:
  """Return if this is a new subentry."""
  return self.source == "user"

- async def async_step_user(
+ async def async_step_set_options(
  self, user_input: dict[str, Any] | None = None
  ) -> SubentryFlowResult:
- """Add a subentry."""
- if self._subentry_type == "ai_task_data":
- self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
- else:
- self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
- return await self.async_step_init()
-
- async def async_step_reconfigure(
- self, user_input: dict[str, Any] | None = None
- ) -> SubentryFlowResult:
- """Handle reconfiguration of a subentry."""
- self.options = self._get_reconfigure_subentry().data.copy()
- return await self.async_step_init()
-
- async def async_step_init(
- self, user_input: dict[str, Any] | None = None
- ) -> SubentryFlowResult:
- """Set initial options."""
+ """Set conversation options."""
  # abort if entry is not loaded
  if self._get_entry().state != ConfigEntryState.LOADED:
  return self.async_abort(reason="entry_not_loaded")

- hass_apis: list[SelectOptionDict] = [
- SelectOptionDict(
- label=api.name,
- value=api.id,
- )
- for api in llm.async_get_apis(self.hass)
- ]
- if (suggested_llm_apis := self.options.get(CONF_LLM_HASS_API)) and isinstance(
- suggested_llm_apis, str
- ):
- self.options[CONF_LLM_HASS_API] = [suggested_llm_apis]
-
- step_schema: VolDictType = {}
  errors: dict[str, str] = {}

- if self._is_new:
- if self._subentry_type == "ai_task_data":
- default_name = DEFAULT_AI_TASK_NAME
+ if user_input is None:
+ if self._is_new:
+ options = RECOMMENDED_OPTIONS.copy()
  else:
- default_name = DEFAULT_CONVERSATION_NAME
- step_schema[vol.Required(CONF_NAME, default=default_name)] = str
+ # If this is a reconfiguration, we need to copy the existing options
+ # so that we can show the current values in the form.
+ options = self._get_reconfigure_subentry().data.copy()

- if self._subentry_type == "conversation":
- step_schema.update(
- {
- vol.Optional(CONF_PROMPT): TemplateSelector(),
- vol.Optional(
- CONF_LLM_HASS_API,
- ): SelectSelector(
- SelectSelectorConfig(options=hass_apis, multiple=True)
- ),
- }
+ self.last_rendered_recommended = cast(
+ bool, options.get(CONF_RECOMMENDED, False)
  )

- step_schema[
- vol.Required(
- CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
- )
- ] = bool
-
- if user_input is not None:
+ elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
  if not user_input.get(CONF_LLM_HASS_API):
  user_input.pop(CONF_LLM_HASS_API, None)
- if user_input[CONF_RECOMMENDED]:
- if not errors:
- if self._is_new:
- return self.async_create_entry(
- title=user_input.pop(CONF_NAME),
- data=user_input,
- )
- return self.async_update_and_abort(
- self._get_entry(),
- self._get_reconfigure_subentry(),
- data=user_input,
- )
- else:
- self.options.update(user_input)
- if (
- CONF_LLM_HASS_API in self.options
- and CONF_LLM_HASS_API not in user_input
- ):
- self.options.pop(CONF_LLM_HASS_API)
- if not errors:
- return await self.async_step_advanced()
-
- return self.async_show_form(
- step_id="init",
- data_schema=self.add_suggested_values_to_schema(
- vol.Schema(step_schema), self.options
- ),
- errors=errors or None,
- )
-
- async def async_step_advanced(
- self, user_input: dict[str, Any] | None = None
- ) -> SubentryFlowResult:
- """Manage advanced options."""
- errors: dict[str, str] = {}
-
- step_schema: VolDictType = {
- vol.Optional(
- CONF_CHAT_MODEL,
- default=RECOMMENDED_CHAT_MODEL,
- ): str,
- vol.Optional(
- CONF_MAX_TOKENS,
- default=RECOMMENDED_MAX_TOKENS,
- ): int,
- vol.Optional(
- CONF_TEMPERATURE,
- default=RECOMMENDED_TEMPERATURE,
- ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
- }
-
- if user_input is not None:
- self.options.update(user_input)
-
- if not errors:
- return await self.async_step_model()
-
- return self.async_show_form(
- step_id="advanced",
- data_schema=self.add_suggested_values_to_schema(
- vol.Schema(step_schema), self.options
- ),
- errors=errors,
- )
-
- async def async_step_model(
- self, user_input: dict[str, Any] | None = None
- ) -> SubentryFlowResult:
- """Manage model-specific options."""
- errors: dict[str, str] = {}
-
- step_schema: VolDictType = {}
-
- model = self.options[CONF_CHAT_MODEL]
-
- if not model.startswith(tuple(NON_THINKING_MODELS)):
- step_schema[
- vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
- ] = vol.All(
- NumberSelector(
- NumberSelectorConfig(
- min=0,
- max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
- )
- ),
- vol.Coerce(int),
- )
- else:
- self.options.pop(CONF_THINKING_BUDGET, None)
-
- if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
- step_schema.update(
- {
- vol.Optional(
- CONF_WEB_SEARCH,
- default=RECOMMENDED_WEB_SEARCH,
- ): bool,
- vol.Optional(
- CONF_WEB_SEARCH_MAX_USES,
- default=RECOMMENDED_WEB_SEARCH_MAX_USES,
- ): int,
- vol.Optional(
- CONF_WEB_SEARCH_USER_LOCATION,
- default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
- ): bool,
- }
- )
- else:
- self.options.pop(CONF_WEB_SEARCH, None)
- self.options.pop(CONF_WEB_SEARCH_MAX_USES, None)
- self.options.pop(CONF_WEB_SEARCH_USER_LOCATION, None)
-
- self.options.pop(CONF_WEB_SEARCH_CITY, None)
- self.options.pop(CONF_WEB_SEARCH_REGION, None)
- self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
- self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
-
- if not step_schema:
- user_input = {}
-
- if user_input is not None:
- if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
- if user_input.get(
+ if user_input.get(
+ CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
+ ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
+ errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
+ if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
+ model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+ if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
+ errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
+ elif user_input.get(
  CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
  ):
  user_input.update(await self._get_location_data())

- self.options.update(user_input)
-
  if not errors:
  if self._is_new:
  return self.async_create_entry(
- title=self.options.pop(CONF_NAME),
- data=self.options,
+ title=user_input.pop(CONF_NAME),
+ data=user_input,
  )

  return self.async_update_and_abort(
  self._get_entry(),
  self._get_reconfigure_subentry(),
- data=self.options,
+ data=user_input,
  )

- return self.async_show_form(
- step_id="model",
- data_schema=self.add_suggested_values_to_schema(
- vol.Schema(step_schema), self.options
+ options = user_input
+ self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
+ else:
+ # Re-render the options again, now with the recommended options shown/hidden
+ self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
+
+ options = {
+ CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
+ CONF_PROMPT: user_input[CONF_PROMPT],
+ CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
+ }
+
+ suggested_values = options.copy()
+ if not suggested_values.get(CONF_PROMPT):
+ suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
+ if (
+ suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
+ ) and isinstance(suggested_llm_apis, str):
+ suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
+
+ schema = self.add_suggested_values_to_schema(
+ vol.Schema(
+ anthropic_config_option_schema(self.hass, self._is_new, options)
  ),
+ suggested_values,
+ )
+
+ return self.async_show_form(
+ step_id="set_options",
+ data_schema=schema,
  errors=errors or None,
- last_step=True,
  )

  async def _get_location_data(self) -> dict[str, str]:

@@ -455,3 +304,77 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
  _LOGGER.debug("Location data: %s", location_data)

  return location_data
+
+ async_step_user = async_step_set_options
+ async_step_reconfigure = async_step_set_options
+
+
+ def anthropic_config_option_schema(
+ hass: HomeAssistant,
+ is_new: bool,
+ options: Mapping[str, Any],
+ ) -> dict:
+ """Return a schema for Anthropic completion options."""
+ hass_apis: list[SelectOptionDict] = [
|
||||||
|
SelectOptionDict(
|
||||||
|
label=api.name,
|
||||||
|
value=api.id,
|
||||||
|
)
|
||||||
|
for api in llm.async_get_apis(hass)
|
||||||
|
]
|
||||||
|
|
||||||
|
if is_new:
|
||||||
|
schema: dict[vol.Required | vol.Optional, Any] = {
|
||||||
|
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
schema = {}
|
||||||
|
|
||||||
|
schema.update(
|
||||||
|
{
|
||||||
|
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
||||||
|
vol.Optional(
|
||||||
|
CONF_LLM_HASS_API,
|
||||||
|
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||||
|
vol.Required(
|
||||||
|
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||||
|
): bool,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if options.get(CONF_RECOMMENDED):
|
||||||
|
return schema
|
||||||
|
|
||||||
|
schema.update(
|
||||||
|
{
|
||||||
|
vol.Optional(
|
||||||
|
CONF_CHAT_MODEL,
|
||||||
|
default=RECOMMENDED_CHAT_MODEL,
|
||||||
|
): str,
|
||||||
|
vol.Optional(
|
||||||
|
CONF_MAX_TOKENS,
|
||||||
|
default=RECOMMENDED_MAX_TOKENS,
|
||||||
|
): int,
|
||||||
|
vol.Optional(
|
||||||
|
CONF_TEMPERATURE,
|
||||||
|
default=RECOMMENDED_TEMPERATURE,
|
||||||
|
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
|
||||||
|
vol.Optional(
|
||||||
|
CONF_THINKING_BUDGET,
|
||||||
|
default=RECOMMENDED_THINKING_BUDGET,
|
||||||
|
): int,
|
||||||
|
vol.Optional(
|
||||||
|
CONF_WEB_SEARCH,
|
||||||
|
default=RECOMMENDED_WEB_SEARCH,
|
||||||
|
): bool,
|
||||||
|
vol.Optional(
|
||||||
|
CONF_WEB_SEARCH_MAX_USES,
|
||||||
|
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||||
|
): int,
|
||||||
|
vol.Optional(
|
||||||
|
CONF_WEB_SEARCH_USER_LOCATION,
|
||||||
|
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||||
|
): bool,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return schema
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ DOMAIN = "anthropic"
 LOGGER = logging.getLogger(__package__)

 DEFAULT_CONVERSATION_NAME = "Claude conversation"
-DEFAULT_AI_TASK_NAME = "Claude AI Task"

 CONF_RECOMMENDED = "recommended"
 CONF_PROMPT = "prompt"
@@ -1,24 +1,17 @@
 """Base entity for Anthropic."""

-import base64
 from collections.abc import AsyncGenerator, Callable, Iterable
 from dataclasses import dataclass, field
 import json
-from mimetypes import guess_file_type
-from pathlib import Path
 from typing import Any

 import anthropic
 from anthropic import AsyncStream
 from anthropic.types import (
-    Base64ImageSourceParam,
-    Base64PDFSourceParam,
     CitationsDelta,
     CitationsWebSearchResultLocation,
     CitationWebSearchResultLocationParam,
     ContentBlockParam,
-    DocumentBlockParam,
-    ImageBlockParam,
     InputJSONDelta,
     MessageDeltaUsage,
     MessageParam,
@@ -44,9 +37,6 @@ from anthropic.types import (
     ThinkingConfigDisabledParam,
     ThinkingConfigEnabledParam,
     ThinkingDelta,
-    ToolChoiceAnyParam,
-    ToolChoiceAutoParam,
-    ToolChoiceToolParam,
     ToolParam,
     ToolResultBlockParam,
     ToolUnionParam,
@@ -60,16 +50,13 @@ from anthropic.types import (
     WebSearchToolResultError,
 )
 from anthropic.types.message_create_params import MessageCreateParamsStreaming
-import voluptuous as vol
 from voluptuous_openapi import convert

 from homeassistant.components import conversation
 from homeassistant.config_entries import ConfigSubentry
-from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import device_registry as dr, llm
 from homeassistant.helpers.entity import Entity
-from homeassistant.util import slugify

 from . import AnthropicConfigEntry
 from .const import (
@@ -334,7 +321,6 @@ def _convert_content(
 async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
     chat_log: conversation.ChatLog,
     stream: AsyncStream[MessageStreamEvent],
-    output_tool: str | None = None,
 ) -> AsyncGenerator[
     conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
 ]:
@@ -395,16 +381,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     input="",
                 )
                 current_tool_args = ""
-                if response.content_block.name == output_tool:
-                    if first_block or content_details.has_content():
-                        if content_details.has_citations():
-                            content_details.delete_empty()
-                            yield {"native": content_details}
-                        content_details = ContentDetails()
-                        content_details.add_citation_detail()
-                        yield {"role": "assistant"}
-                        has_native = False
-                    first_block = False
             elif isinstance(response.content_block, TextBlock):
                 if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
                     first_block
@@ -495,16 +471,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 first_block = True
         elif isinstance(response, RawContentBlockDeltaEvent):
             if isinstance(response.delta, InputJSONDelta):
-                if (
-                    current_tool_block is not None
-                    and current_tool_block["name"] == output_tool
-                ):
-                    content_details.citation_details[-1].length += len(
-                        response.delta.partial_json
-                    )
-                    yield {"content": response.delta.partial_json}
-                else:
-                    current_tool_args += response.delta.partial_json
+                current_tool_args += response.delta.partial_json
             elif isinstance(response.delta, TextDelta):
                 content_details.citation_details[-1].length += len(response.delta.text)
                 yield {"content": response.delta.text}
@@ -523,9 +490,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 content_details.add_citation(response.delta.citation)
         elif isinstance(response, RawContentBlockStopEvent):
             if current_tool_block is not None:
-                if current_tool_block["name"] == output_tool:
-                    current_tool_block = None
-                    continue
                 tool_args = json.loads(current_tool_args) if current_tool_args else {}
                 current_tool_block["input"] = tool_args
                 yield {
@@ -593,8 +557,6 @@ class AnthropicBaseLLMEntity(Entity):
     async def _async_handle_chat_log(
         self,
         chat_log: conversation.ChatLog,
-        structure_name: str | None = None,
-        structure: vol.Schema | None = None,
     ) -> None:
         """Generate an answer for the chat log."""
         options = self.subentry.data
@@ -651,74 +613,6 @@ class AnthropicBaseLLMEntity(Entity):
             }
             tools.append(web_search)

-        # Handle attachments by adding them to the last user message
-        last_content = chat_log.content[-1]
-        if last_content.role == "user" and last_content.attachments:
-            last_message = messages[-1]
-            if last_message["role"] != "user":
-                raise HomeAssistantError(
-                    "Last message must be a user message to add attachments"
-                )
-            if isinstance(last_message["content"], str):
-                last_message["content"] = [
-                    TextBlockParam(type="text", text=last_message["content"])
-                ]
-            last_message["content"].extend(  # type: ignore[union-attr]
-                await async_prepare_files_for_prompt(
-                    self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
-                )
-            )
-
-        if structure and structure_name:
-            structure_name = slugify(structure_name)
-            if model_args["thinking"]["type"] == "disabled":
-                if not tools:
-                    # Simplest case: no tools and no extended thinking
-                    # Add a tool and force its use
-                    model_args["tool_choice"] = ToolChoiceToolParam(
-                        type="tool",
-                        name=structure_name,
-                    )
-                else:
-                    # Second case: tools present but no extended thinking
-                    # Allow the model to use any tool but not text response
-                    # The model should know to use the right tool by its description
-                    model_args["tool_choice"] = ToolChoiceAnyParam(
-                        type="any",
-                    )
-            else:
-                # Extended thinking is enabled. With extended thinking, we cannot
-                # force tool use or disable text responses, so we add a hint to the
-                # system prompt instead. With extended thinking, the model should be
-                # smart enough to use the tool.
-                model_args["tool_choice"] = ToolChoiceAutoParam(
-                    type="auto",
-                )
-
-            if isinstance(model_args["system"], str):
-                model_args["system"] = [
-                    TextBlockParam(type="text", text=model_args["system"])
-                ]
-            model_args["system"].append(  # type: ignore[union-attr]
-                TextBlockParam(
-                    type="text",
-                    text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
-                )
-            )
-
-            tools.append(
-                ToolParam(
-                    name=structure_name,
-                    description="Use this tool to reply to the user",
-                    input_schema=convert(
-                        structure,
-                        custom_serializer=chat_log.llm_api.custom_serializer
-                        if chat_log.llm_api
-                        else llm.selector_serializer,
-                    ),
-                )
-            )
-
         if tools:
             model_args["tools"] = tools

@@ -735,11 +629,7 @@ class AnthropicBaseLLMEntity(Entity):
                     content
                    async for content in chat_log.async_add_delta_content_stream(
                         self.entity_id,
-                        _transform_stream(
-                            chat_log,
-                            stream,
-                            output_tool=structure_name if structure else None,
-                        ),
+                        _transform_stream(chat_log, stream),
                     )
                 ]
             )
@@ -751,59 +641,3 @@ class AnthropicBaseLLMEntity(Entity):

             if not chat_log.unresponded_tool_results:
                 break
-
-
-async def async_prepare_files_for_prompt(
-    hass: HomeAssistant, files: list[tuple[Path, str | None]]
-) -> Iterable[ImageBlockParam | DocumentBlockParam]:
-    """Append files to a prompt.
-
-    Caller needs to ensure that the files are allowed.
-    """
-
-    def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
-        content: list[ImageBlockParam | DocumentBlockParam] = []
-
-        for file_path, mime_type in files:
-            if not file_path.exists():
-                raise HomeAssistantError(f"`{file_path}` does not exist")
-
-            if mime_type is None:
-                mime_type = guess_file_type(file_path)[0]
-
-            if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
-                raise HomeAssistantError(
-                    "Only images and PDF are supported by the Anthropic API,"
-                    f"`{file_path}` is not an image file or PDF"
-                )
-            if mime_type == "image/jpg":
-                mime_type = "image/jpeg"
-
-            base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
-
-            if mime_type.startswith("image/"):
-                content.append(
-                    ImageBlockParam(
-                        type="image",
-                        source=Base64ImageSourceParam(
-                            type="base64",
-                            media_type=mime_type,  # type: ignore[typeddict-item]
-                            data=base64_file,
-                        ),
-                    )
-                )
-            elif mime_type.startswith("application/pdf"):
-                content.append(
-                    DocumentBlockParam(
-                        type="document",
-                        source=Base64PDFSourceParam(
-                            type="base64",
-                            media_type=mime_type,  # type: ignore[typeddict-item]
-                            data=base64_file,
-                        ),
-                    )
-                )
-
-        return content
-
-    return await hass.async_add_executor_job(append_files_to_content)
@@ -18,94 +18,43 @@
     }
   },
   "config_subentries": {
-    "ai_task_data": {
-      "abort": {
-        "entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
-        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
-      },
-      "entry_type": "AI task",
-      "initiate_flow": {
-        "reconfigure": "Reconfigure AI task",
-        "user": "Add AI task"
-      },
-      "step": {
-        "advanced": {
-          "data": {
-            "chat_model": "[%key:common::generic::model%]",
-            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
-            "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
-          },
-          "title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
-        },
-        "init": {
-          "data": {
-            "name": "[%key:common::config_flow::data::name%]",
-            "recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
-          },
-          "title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
-        },
-        "model": {
-          "data": {
-            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
-            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
-            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
-            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
-          },
-          "data_description": {
-            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
-            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
-            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
-            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
-          },
-          "title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
-        }
-      }
-    },
     "conversation": {
       "abort": {
         "entry_not_loaded": "Cannot add things while the configuration is disabled.",
         "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
       },
       "entry_type": "Conversation agent",
+      "error": {
+        "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
+        "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
+      },
       "initiate_flow": {
         "reconfigure": "Reconfigure conversation agent",
         "user": "Add conversation agent"
       },
       "step": {
-        "advanced": {
+        "set_options": {
           "data": {
             "chat_model": "[%key:common::generic::model%]",
-            "max_tokens": "Maximum tokens to return in response",
-            "temperature": "Temperature"
-          },
-          "title": "Advanced settings"
-        },
-        "init": {
-          "data": {
             "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
+            "max_tokens": "Maximum tokens to return in response",
             "name": "[%key:common::config_flow::data::name%]",
             "prompt": "[%key:common::config_flow::data::prompt%]",
-            "recommended": "Recommended model settings"
-          },
-          "data_description": {
-            "prompt": "Instruct how the LLM should respond. This can be a template."
-          },
-          "title": "Basic settings"
-        },
-        "model": {
-          "data": {
+            "recommended": "Recommended model settings",
+            "temperature": "Temperature",
             "thinking_budget": "Thinking budget",
             "user_location": "Include home location",
             "web_search": "Enable web search",
             "web_search_max_uses": "Maximum web searches"
           },
           "data_description": {
+            "prompt": "Instruct how the LLM should respond. This can be a template.",
             "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
             "user_location": "Localize search results based on home location",
             "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
             "web_search_max_uses": "Limit the number of searches performed per response"
-          },
-          "title": "Model-specific options"
+          }
         }
       }
     }
@@ -111,6 +111,8 @@ def handle_errors_and_zip[_AsusWrtBridgeT: AsusWrtBridge](

         if isinstance(data, dict):
             return dict(zip(keys, list(data.values()), strict=False))
+        if not isinstance(data, (list, tuple)):
+            raise UpdateFailed("Received invalid data type")
         return dict(zip(keys, data, strict=False))

     return _wrapper
@@ -14,11 +14,10 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import device_registry as dr, issue_registry as ir
-from homeassistant.helpers.config_entry_oauth2_flow import (
-    ImplementationUnavailableError,
-    OAuth2Session,
-    async_get_config_entry_implementation,
+from homeassistant.helpers import (
+    config_entry_oauth2_flow,
+    device_registry as dr,
+    issue_registry as ir,
 )

 from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
@@ -38,10 +37,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo

     session = async_create_august_clientsession(hass)
     try:
-        implementation = await async_get_config_entry_implementation(hass, entry)
-    except ImplementationUnavailableError as err:
+        implementation = (
+            await config_entry_oauth2_flow.async_get_config_entry_implementation(
+                hass, entry
+            )
+        )
+    except ValueError as err:
         raise ConfigEntryNotReady("OAuth implementation not available") from err
-    oauth_session = OAuth2Session(hass, entry, implementation)
+    oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
     try:
         await async_setup_august(hass, entry, august_gateway)
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["avea"],
   "quality_scale": "legacy",
-  "requirements": ["avea==1.6.1"]
+  "requirements": ["avea==1.5.1"]
 }
@@ -8,6 +8,6 @@
   "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.7", "securetar==2025.2.1"],
+  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
   "single_config_entry": true
 }
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["mozart-api==5.1.0.247.1"],
+  "requirements": ["mozart-api==4.1.1.116.4"],
   "zeroconf": ["_bangolufsen._tcp.local."]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blue_current",
   "iot_class": "cloud_push",
   "loggers": ["bluecurrent_api"],
-  "requirements": ["bluecurrent-api==1.3.2"]
+  "requirements": ["bluecurrent-api==1.3.1"]
 }
@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -1,7 +1,9 @@
 """The blueprint integration."""

+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.discovery import async_load_platform
 from homeassistant.helpers.typing import ConfigType

 from . import websocket_api
@@ -28,4 +30,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the blueprint integration."""
     websocket_api.async_setup(hass)
+    hass.async_create_task(
+        async_load_platform(hass, Platform.UPDATE, DOMAIN, None, config)
+    )
     return True
@@ -204,8 +204,8 @@ class DomainBlueprints:
         self.hass = hass
         self.domain = domain
         self.logger = logger
-        self._blueprint_in_use = blueprint_in_use
-        self._reload_blueprint_consumers = reload_blueprint_consumers
+        self.blueprint_in_use = blueprint_in_use
+        self.reload_blueprint_consumers = reload_blueprint_consumers
         self._blueprints: dict[str, Blueprint | None] = {}
         self._load_lock = asyncio.Lock()
         self._blueprint_schema = blueprint_schema
@@ -325,7 +325,7 @@ class DomainBlueprints:

     async def async_remove_blueprint(self, blueprint_path: str) -> None:
         """Remove a blueprint file."""
-        if self._blueprint_in_use(self.hass, blueprint_path):
+        if self.blueprint_in_use(self.hass, blueprint_path):
             raise BlueprintInUse(self.domain, blueprint_path)
         path = self.blueprint_folder / blueprint_path
         await self.hass.async_add_executor_job(path.unlink)
@@ -362,7 +362,7 @@ class DomainBlueprints:
         self._blueprints[blueprint_path] = blueprint

         if overrides_existing:
-            await self._reload_blueprint_consumers(self.hass, blueprint_path)
+            await self.reload_blueprint_consumers(self.hass, blueprint_path)

         return overrides_existing

 293  homeassistant/components/blueprint/update.py  Normal file
@@ -0,0 +1,293 @@
+"""Update entities for blueprints."""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass
+import logging
+from datetime import timedelta
+from typing import Any, Final
+
+from homeassistant.components import automation, script
+from . import importer, models
+from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
+from homeassistant.const import CONF_SOURCE_URL
+from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import event as event_helper
+from homeassistant.helpers.entity import EntityCategory
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
+
+from .const import DOMAIN as BLUEPRINT_DOMAIN
+from .errors import BlueprintException
+
+_LOGGER = logging.getLogger(__name__)
+
+_LATEST_VERSION_PLACEHOLDER: Final = "remote"
+DATA_UPDATE_MANAGER: Final = "update_manager"
+REFRESH_INTERVAL: Final = timedelta(days=1)
+
+
+@dataclass(slots=True)
+class BlueprintUsage:
+    """Details about a blueprint currently in use."""
+
+    domain: str
+    path: str
+    domain_blueprints: models.DomainBlueprints
+    blueprint: models.Blueprint
+    entities: list[str]
+
+
+async def async_setup_platform(
+    hass: HomeAssistant,
+    config: ConfigType,
+    async_add_entities: AddEntitiesCallback,
+    discovery_info: DiscoveryInfoType | None = None,
+) -> None:
+    """Set up the blueprint update platform."""
+    data = hass.data.setdefault(BLUEPRINT_DOMAIN, {})
+
+    if (manager := data.get(DATA_UPDATE_MANAGER)) is None:
+        manager = BlueprintUpdateManager(hass, async_add_entities)
+        data[DATA_UPDATE_MANAGER] = manager
+        await manager.async_start()
+        return
+
+    manager.replace_add_entities(async_add_entities)
+    await manager.async_recreate_entities()
+
+
+class BlueprintUpdateManager:
+    """Manage blueprint update entities based on blueprint usage."""
+
+    def __init__(
+        self, hass: HomeAssistant, async_add_entities: AddEntitiesCallback
+    ) -> None:
+        """Initialize the manager."""
+        self.hass = hass
+        self._async_add_entities = async_add_entities
+        self._entities: dict[tuple[str, str], BlueprintUpdateEntity] = {}
+        self._lock = asyncio.Lock()
+        self._refresh_cancel: CALLBACK_TYPE | None = None
+        self._started = False
+        self._interval_unsub: CALLBACK_TYPE | None = None
+
+    async def async_start(self) -> None:
+        """Start tracking blueprint usage."""
+        if self._started:
+            return
+        self._started = True
+
+        self._interval_unsub = event_helper.async_track_time_interval(
+            self.hass, self._handle_time_interval, REFRESH_INTERVAL
+        )
+        await self.async_refresh_entities()
+
+    def replace_add_entities(self, async_add_entities: AddEntitiesCallback) -> None:
+        """Update the callback used to register entities."""
+        self._async_add_entities = async_add_entities
+
+    async def async_recreate_entities(self) -> None:
+        """Recreate entities after the platform has been reloaded."""
+        async with self._lock:
+            entities = list(self._entities.values())
+            self._entities.clear()
+
+        for entity in entities:
+            await entity.async_remove()
+
+        await self.async_refresh_entities()
+
+    async def async_refresh_entities(self) -> None:
+        """Refresh update entities based on current blueprint usage."""
+        async with self._lock:
+            usage_map = await self._async_collect_in_use_blueprints()
+
+            current_keys = set(self._entities)
+            new_keys = set(usage_map)
+
+            for key in current_keys - new_keys:
+                entity = self._entities.pop(key)
+                await entity.async_remove()
+
+            new_entities: list[BlueprintUpdateEntity] = []
+
+            for key in new_keys - current_keys:
+                usage = usage_map[key]
+                entity = BlueprintUpdateEntity(self, usage)
+                self._entities[key] = entity
+                new_entities.append(entity)
+
+            for key in new_keys & current_keys:
+                self._entities[key].update_usage(usage_map[key])
+                self._entities[key].async_write_ha_state()
+
+            if new_entities:
+                self._async_add_entities(new_entities)
+
+    def async_schedule_refresh(self) -> None:
+        """Schedule an asynchronous refresh."""
+        if self._refresh_cancel is not None:
+            return
+
+        self._refresh_cancel = event_helper.async_call_later(
+            self.hass, 0, self._handle_scheduled_refresh
+        )
+
+    @callback
+    def _handle_scheduled_refresh(self, _now: Any) -> None:
+        """Run a scheduled refresh task."""
+        self._refresh_cancel = None
+        self.hass.async_create_task(self.async_refresh_entities())
+
+    @callback
+    def _handle_time_interval(self, _now: Any) -> None:
+        """Handle scheduled interval refresh."""
+        self.async_schedule_refresh()
+
+    async def _async_collect_in_use_blueprints(self) -> dict[tuple[str, str], BlueprintUsage]:
+        """Collect blueprint usage information for automations and scripts."""
+
+        usage_keys: set[tuple[str, str]] = set()
+
+        if automation.DATA_COMPONENT in self.hass.data:
+            component = self.hass.data[automation.DATA_COMPONENT]
+            for automation_entity in list(component.entities):
+                if (path := getattr(automation_entity, "referenced_blueprint", None)):
+                    usage_keys.add((automation.DOMAIN, path))
+
+        if script.DOMAIN in self.hass.data:
+            component = self.hass.data[script.DOMAIN]
+            for script_entity in list(component.entities):
+                if (path := getattr(script_entity, "referenced_blueprint", None)):
+                    usage_keys.add((script.DOMAIN, path))
+
+        domain_blueprints_map = self.hass.data.get(BLUEPRINT_DOMAIN, {})
+        usage_map: dict[tuple[str, str], BlueprintUsage] = {}
+
+        for domain, path in usage_keys:
+            domain_blueprints: models.DomainBlueprints | None = domain_blueprints_map.get(
+                domain
+            )
+
+            if domain_blueprints is None:
+                continue
+
+            if not domain_blueprints.blueprint_in_use(self.hass, path):
+                continue
+
+            try:
+                blueprint = await domain_blueprints.async_get_blueprint(path)
+            except BlueprintException:
+                continue
+
+            source_url = blueprint.metadata.get(CONF_SOURCE_URL)
+            if not source_url:
+                continue
+
+            if domain == automation.DOMAIN:
+                entities = automation.automations_with_blueprint(self.hass, path)
+            elif domain == script.DOMAIN:
+                entities = script.scripts_with_blueprint(self.hass, path)
+            else:
+                entities = []
+
+            usage_map[(domain, path)] = BlueprintUsage(
+                domain=domain,
+                path=path,
+                domain_blueprints=domain_blueprints,
+                blueprint=blueprint,
+                entities=entities,
+            )
+
+        return usage_map
+
+
+class BlueprintUpdateEntity(UpdateEntity):
+    """Define a blueprint update entity."""
+
+    _attr_entity_category = EntityCategory.CONFIG
+    _attr_has_entity_name = True
+    _attr_should_poll = False
+    _attr_supported_features = UpdateEntityFeature.INSTALL
+
+    def __init__(self, manager: BlueprintUpdateManager, usage: BlueprintUsage) -> None:
+        """Initialize the update entity."""
+        self._manager = manager
+        self._domain = usage.domain
+        self._path = usage.path
+        self._domain_blueprints = usage.domain_blueprints
+        self._blueprint = usage.blueprint
+        self._entities_in_use = usage.entities
+        self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
+        self._attr_unique_id = f"{self._domain}:{self._path}"
+        self._attr_in_progress = False
+
+        self.update_usage(usage)
+
+    @callback
+    def update_usage(self, usage: BlueprintUsage) -> None:
+        """Update the entity with latest usage information."""
+        self._domain_blueprints = usage.domain_blueprints
+        self._blueprint = usage.blueprint
+        self._entities_in_use = usage.entities
+        self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
+
+        self._attr_name = usage.blueprint.name
+        self._attr_release_summary = usage.blueprint.metadata.get("description")
+        self._attr_installed_version = usage.blueprint.metadata.get("version")
+        self._attr_release_url = self._source_url
+        self._attr_available = self._source_url is not None
+        self._attr_latest_version = (
+            _LATEST_VERSION_PLACEHOLDER
+            if self._source_url is not None
+            else self._attr_installed_version
+        )
+
+    async def async_install(self, version: str | None, backup: bool) -> None:
+        """Install (refresh) the blueprint from its source."""
+        if self._source_url is None:
+            raise HomeAssistantError("Blueprint does not define a source URL")
+
+        self._attr_in_progress = True
+        self.async_write_ha_state()
+        usage: BlueprintUsage | None = None
+
+        try:
+            imported = await importer.fetch_blueprint_from_url(
+                self.hass, self._source_url
+            )
+            blueprint = imported.blueprint
+
+            if blueprint.domain != self._domain:
+                raise HomeAssistantError(
+                    "Downloaded blueprint domain does not match the existing blueprint"
+                )
+
+            await self._domain_blueprints.async_add_blueprint(
+                blueprint, self._path, allow_override=True
+            )
+
+            usage = BlueprintUsage(
+                domain=self._domain,
+                path=self._path,
+                domain_blueprints=self._domain_blueprints,
+                blueprint=blueprint,
+                entities=self._entities_in_use,
+            )
+
+        except HomeAssistantError:
+            raise
+        except Exception as err:  # noqa: BLE001 - Provide context for unexpected errors
+            raise HomeAssistantError("Failed to update blueprint from source") from err
+        finally:
+            self._attr_in_progress = False
+
+            if usage is not None:
+                self.update_usage(usage)
+
+            self.async_write_ha_state()
+
+            self._manager.async_schedule_refresh()
@@ -20,7 +20,7 @@
     "bluetooth-adapters==2.1.0",
     "bluetooth-auto-recovery==1.5.3",
     "bluetooth-data-tools==1.28.4",
-    "dbus-fast==2.45.0",
+    "dbus-fast==2.44.5",
     "habluetooth==5.7.0"
   ]
 }
@@ -99,12 +99,6 @@ def deserialize_entity_description(
         descriptions_class = descriptions_class._dataclass  # noqa: SLF001
     for field in cached_fields(descriptions_class):
         field_name = field.name
-        # Only set fields that are in the data
-        # otherwise we would override default values with None
-        # causing side effects
-        if field_name not in data:
-            continue
-
         # It would be nice if field.type returned the actual
         # type instead of a str so we could avoid writing this
         # out, but it doesn't. If we end up using this in more
@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
 from homeassistant.components.snmp import async_get_snmp_engine
 from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryNotReady

 from .const import (
     CONF_COMMUNITY,
@@ -50,15 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
     coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
     await coordinator.async_config_entry_first_refresh()

-    if brother.serial.lower() != entry.unique_id:
-        raise ConfigEntryError(
-            translation_domain=DOMAIN,
-            translation_key="serial_mismatch",
-            translation_placeholders={
-                "device": entry.title,
-            },
-        )
-
     entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -13,7 +13,6 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import section
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 from homeassistant.util.network import is_host_valid

@@ -22,7 +21,6 @@ from .const import (
     DEFAULT_COMMUNITY,
     DEFAULT_PORT,
     DOMAIN,
-    PRINTER_TYPE_LASER,
     PRINTER_TYPES,
     SECTION_ADVANCED_SETTINGS,
 )
@@ -30,12 +28,7 @@ from .const import (
 DATA_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST): str,
-        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
-            SelectSelectorConfig(
-                options=PRINTER_TYPES,
-                translation_key="printer_type",
-            )
-        ),
+        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {
@@ -49,12 +42,7 @@ DATA_SCHEMA = vol.Schema(
 )
 ZEROCONF_SCHEMA = vol.Schema(
     {
-        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
-            SelectSelectorConfig(
-                options=PRINTER_TYPES,
-                translation_key="printer_type",
-            )
-        ),
+        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {
@@ -7,10 +7,7 @@ from typing import Final

 DOMAIN: Final = "brother"

-PRINTER_TYPE_LASER = "laser"
-PRINTER_TYPE_INK = "ink"
-
-PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
+PRINTER_TYPES: Final = ["laser", "ink"]

 UPDATE_INTERVAL = timedelta(seconds=30)

@@ -1,30 +0,0 @@
-"""Define the Brother entity."""
-
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
-from homeassistant.helpers.update_coordinator import CoordinatorEntity
-
-from .const import DOMAIN
-from .coordinator import BrotherDataUpdateCoordinator
-
-
-class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
-    """Define a Brother Printer entity."""
-
-    _attr_has_entity_name = True
-
-    def __init__(
-        self,
-        coordinator: BrotherDataUpdateCoordinator,
-    ) -> None:
-        """Initialize."""
-        super().__init__(coordinator)
-        self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{coordinator.brother.host}/",
-            identifiers={(DOMAIN, coordinator.brother.serial)},
-            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
-            serial_number=coordinator.brother.serial,
-            manufacturer="Brother",
-            model=coordinator.brother.model,
-            name=coordinator.brother.model,
-            sw_version=coordinator.brother.firmware,
-        )
@@ -19,15 +19,13 @@ from homeassistant.components.sensor import (
 from homeassistant.const import PERCENTAGE, EntityCategory
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
+from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import DOMAIN
 from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
-from .entity import BrotherPrinterEntity
-
-# Coordinator is used to centralize the data updates
-PARALLEL_UPDATES = 0

 ATTR_COUNTER = "counter"
 ATTR_REMAINING_PAGES = "remaining_pages"
@@ -332,9 +330,12 @@ async def async_setup_entry(
     )


-class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
-    """Define a Brother Printer sensor."""
+class BrotherPrinterSensor(
+    CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
+):
+    """Define an Brother Printer sensor."""

+    _attr_has_entity_name = True
     entity_description: BrotherSensorEntityDescription

     def __init__(
@@ -344,7 +345,16 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
     ) -> None:
         """Initialize."""
         super().__init__(coordinator)
+        self._attr_device_info = DeviceInfo(
+            configuration_url=f"http://{coordinator.brother.host}/",
+            identifiers={(DOMAIN, coordinator.brother.serial)},
+            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
+            serial_number=coordinator.brother.serial,
+            manufacturer="Brother",
+            model=coordinator.brother.model,
+            name=coordinator.brother.model,
+            sw_version=coordinator.brother.firmware,
+        )
         self._attr_native_value = description.value(coordinator.data)
         self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
         self.entity_description = description
@@ -38,11 +38,11 @@
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
-          "type": "Printer type"
+          "type": "Type of the printer"
         },
         "data_description": {
           "host": "The hostname or IP address of the Brother printer to control.",
-          "type": "The type of the Brother printer."
+          "type": "Brother printer type: ink or laser."
         },
         "sections": {
           "advanced_settings": {
@@ -207,19 +207,8 @@
     "cannot_connect": {
       "message": "An error occurred while connecting to the {device} printer: {error}"
     },
-    "serial_mismatch": {
-      "message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
-    },
     "update_error": {
       "message": "An error occurred while retrieving data from the {device} printer: {error}"
     }
-  },
-  "selector": {
-    "printer_type": {
-      "options": {
-        "ink": "ink",
-        "laser": "laser"
-      }
-    }
   }
 }
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
|
|||||||
return HVACAction.HEATING
|
return HVACAction.HEATING
|
||||||
raise HomeAssistantError(
|
raise HomeAssistantError(
|
||||||
translation_domain=DOMAIN,
|
translation_domain=DOMAIN,
|
||||||
translation_key="failed_to_parse_hvac_action",
|
translation_key="failed_to_parse_hvac_mode",
|
||||||
translation_placeholders={
|
translation_placeholders={
|
||||||
"mode_and_active": mode_and_active,
|
"mode_and_active": mode_and_active,
|
||||||
"current_temperature": str(self.current_temperature),
|
"current_temperature": str(self.current_temperature),
|
||||||
|
|||||||
@@ -24,7 +24,7 @@
   },
   "exceptions": {
     "failed_to_parse_hvac_action": {
-      "message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
+      "message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
     },
     "failed_to_parse_hvac_mode": {
       "message": "Cannot parse response to HVACMode: {mode}"
|
|||||||
@@ -74,11 +74,8 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
|||||||
super().__init__(data.fast_coordinator, data)
|
super().__init__(data.fast_coordinator, data)
|
||||||
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
|
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
|
||||||
|
|
||||||
# Set temperature range if available, otherwise use Home Assistant defaults
|
self._attr_min_temp = data.static.min_temp.value
|
||||||
if data.static.min_temp is not None and data.static.min_temp.value is not None:
|
self._attr_max_temp = data.static.max_temp.value
|
||||||
self._attr_min_temp = data.static.min_temp.value
|
|
||||||
if data.static.max_temp is not None and data.static.max_temp.value is not None:
|
|
||||||
self._attr_max_temp = data.static.max_temp.value
|
|
||||||
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
|
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==3.1.1"],
+  "requirements": ["python-bsblan==3.1.0"],
   "zeroconf": [
     {
       "name": "bsb-lan*",
|
|||||||
@@ -63,7 +63,6 @@ BINARY_SENSOR_DESCRIPTIONS = {
|
|||||||
),
|
),
|
||||||
BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
|
BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
|
||||||
key=BTHomeBinarySensorDeviceClass.GENERIC,
|
key=BTHomeBinarySensorDeviceClass.GENERIC,
|
||||||
translation_key="generic",
|
|
||||||
),
|
),
|
||||||
BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
|
BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
|
||||||
key=BTHomeBinarySensorDeviceClass.LIGHT,
|
key=BTHomeBinarySensorDeviceClass.LIGHT,
|
||||||
@@ -160,7 +159,10 @@ def sensor_update_to_bluetooth_data_update(
|
|||||||
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
|
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
|
||||||
for device_key, sensor_values in sensor_update.binary_entity_values.items()
|
for device_key, sensor_values in sensor_update.binary_entity_values.items()
|
||||||
},
|
},
|
||||||
entity_names={},
|
entity_names={
|
||||||
|
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
|
||||||
|
for device_key, sensor_values in sensor_update.binary_entity_values.items()
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -59,7 +59,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
|
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
|
||||||
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
|
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="acceleration",
|
|
||||||
),
|
),
|
||||||
# Battery (percent)
|
# Battery (percent)
|
||||||
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
|
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
|
||||||
@@ -73,7 +72,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
|
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
|
||||||
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
|
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="channel",
|
|
||||||
),
|
),
|
||||||
# Conductivity (μS/cm)
|
# Conductivity (μS/cm)
|
||||||
(
|
(
|
||||||
@@ -89,7 +87,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
|
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
|
||||||
key=str(BTHomeSensorDeviceClass.COUNT),
|
key=str(BTHomeSensorDeviceClass.COUNT),
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="count",
|
|
||||||
),
|
),
|
||||||
# CO2 (parts per million)
|
# CO2 (parts per million)
|
||||||
(
|
(
|
||||||
@@ -117,14 +114,12 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
device_class=SensorDeviceClass.TEMPERATURE,
|
device_class=SensorDeviceClass.TEMPERATURE,
|
||||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="dew_point",
|
|
||||||
),
|
),
|
||||||
# Directions (°)
|
# Directions (°)
|
||||||
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
|
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
|
||||||
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
|
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
|
||||||
native_unit_of_measurement=DEGREE,
|
native_unit_of_measurement=DEGREE,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="direction",
|
|
||||||
),
|
),
|
||||||
# Distance (mm)
|
# Distance (mm)
|
||||||
(
|
(
|
||||||
@@ -178,7 +173,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
|
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
|
||||||
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
|
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="gyroscope",
|
|
||||||
),
|
),
|
||||||
# Humidity in (percent)
|
# Humidity in (percent)
|
||||||
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
|
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
|
||||||
@@ -221,7 +215,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
entity_category=EntityCategory.DIAGNOSTIC,
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
entity_registry_enabled_default=False,
|
entity_registry_enabled_default=False,
|
||||||
translation_key="packet_id",
|
|
||||||
),
|
),
|
||||||
# PM10 (μg/m3)
|
# PM10 (μg/m3)
|
||||||
(
|
(
|
||||||
@@ -270,14 +263,12 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
# Raw (-)
|
# Raw (-)
|
||||||
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
|
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
|
||||||
key=str(BTHomeExtendedSensorDeviceClass.RAW),
|
key=str(BTHomeExtendedSensorDeviceClass.RAW),
|
||||||
translation_key="raw",
|
|
||||||
),
|
),
|
||||||
# Rotation (°)
|
# Rotation (°)
|
||||||
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
|
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
|
||||||
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
|
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
|
||||||
native_unit_of_measurement=DEGREE,
|
native_unit_of_measurement=DEGREE,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="rotation",
|
|
||||||
),
|
),
|
||||||
# Rotational speed (rpm)
|
# Rotational speed (rpm)
|
||||||
(
|
(
|
||||||
@@ -287,7 +278,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
|
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
|
||||||
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="rotational_speed",
|
|
||||||
),
|
),
|
||||||
# Signal Strength (RSSI) (dB)
|
# Signal Strength (RSSI) (dB)
|
||||||
(
|
(
|
||||||
@@ -321,7 +311,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
# Text (-)
|
# Text (-)
|
||||||
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
|
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
|
||||||
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
|
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
|
||||||
translation_key="text",
|
|
||||||
),
|
),
|
||||||
# Timestamp (datetime object)
|
# Timestamp (datetime object)
|
||||||
(
|
(
|
||||||
@@ -338,7 +327,6 @@ SENSOR_DESCRIPTIONS = {
|
|||||||
): SensorEntityDescription(
|
): SensorEntityDescription(
|
||||||
key=str(BTHomeSensorDeviceClass.UV_INDEX),
|
key=str(BTHomeSensorDeviceClass.UV_INDEX),
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
translation_key="uv_index",
|
|
||||||
),
|
),
|
||||||
# Volatile organic Compounds (VOC) (μg/m3)
|
# Volatile organic Compounds (VOC) (μg/m3)
|
||||||
(
|
(
|
||||||
@@ -435,7 +423,10 @@ def sensor_update_to_bluetooth_data_update(
|
|||||||
)
|
)
|
||||||
for device_key, sensor_values in sensor_update.entity_values.items()
|
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||||
},
|
},
|
||||||
entity_names={},
|
entity_names={
|
||||||
|
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
|
||||||
|
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -47,11 +47,6 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"entity": {
|
"entity": {
|
||||||
"binary_sensor": {
|
|
||||||
"generic": {
|
|
||||||
"name": "Generic"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"event": {
|
"event": {
|
||||||
"button": {
|
"button": {
|
||||||
"state_attributes": {
|
"state_attributes": {
|
||||||
@@ -78,44 +73,6 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"sensor": {
|
|
||||||
"acceleration": {
|
|
||||||
"name": "Acceleration"
|
|
||||||
},
|
|
||||||
"channel": {
|
|
||||||
"name": "Channel"
|
|
||||||
},
|
|
||||||
"count": {
|
|
||||||
"name": "Count"
|
|
||||||
},
|
|
||||||
"dew_point": {
|
|
||||||
"name": "Dew point"
|
|
||||||
},
|
|
||||||
"direction": {
|
|
||||||
"name": "Direction"
|
|
||||||
},
|
|
||||||
"gyroscope": {
|
|
||||||
"name": "Gyroscope"
|
|
||||||
},
|
|
||||||
"packet_id": {
|
|
||||||
"name": "Packet ID"
|
|
||||||
},
|
|
||||||
"raw": {
|
|
||||||
"name": "Raw"
|
|
||||||
},
|
|
||||||
"rotation": {
|
|
||||||
"name": "Rotation"
|
|
||||||
},
|
|
||||||
"rotational_speed": {
|
|
||||||
"name": "Rotational speed"
|
|
||||||
},
|
|
||||||
"text": {
|
|
||||||
"name": "Text"
|
|
||||||
},
|
|
||||||
"uv_index": {
|
|
||||||
"name": "UV Index"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/caldav",
   "iot_class": "cloud_polling",
   "loggers": ["caldav", "vobject"],
-  "requirements": ["caldav==2.1.0", "icalendar==6.3.1", "vobject==0.9.9"]
+  "requirements": ["caldav==1.6.0", "icalendar==6.3.1"]
 }
|
|||||||
@@ -57,9 +57,9 @@ async def _async_reproduce_states(
         await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})
 
     if (
-        (state.attributes.get(ATTR_TEMPERATURE) is not None)
-        or (state.attributes.get(ATTR_TARGET_TEMP_HIGH) is not None)
-        or (state.attributes.get(ATTR_TARGET_TEMP_LOW) is not None)
+        (ATTR_TEMPERATURE in state.attributes)
+        or (ATTR_TARGET_TEMP_HIGH in state.attributes)
+        or (ATTR_TARGET_TEMP_LOW in state.attributes)
     ):
         await call_service(
             SERVICE_SET_TEMPERATURE,
|
|||||||
@@ -55,7 +55,6 @@ from .const import (
|
|||||||
CONF_ALIASES,
|
CONF_ALIASES,
|
||||||
CONF_API_SERVER,
|
CONF_API_SERVER,
|
||||||
CONF_COGNITO_CLIENT_ID,
|
CONF_COGNITO_CLIENT_ID,
|
||||||
CONF_DISCOVERY_SERVICE_ACTIONS,
|
|
||||||
CONF_ENTITY_CONFIG,
|
CONF_ENTITY_CONFIG,
|
||||||
CONF_FILTER,
|
CONF_FILTER,
|
||||||
CONF_GOOGLE_ACTIONS,
|
CONF_GOOGLE_ACTIONS,
|
||||||
@@ -140,7 +139,6 @@ CONFIG_SCHEMA = vol.Schema(
|
|||||||
{
|
{
|
||||||
vol.Required(CONF_MODE): vol.In([MODE_DEV]),
|
vol.Required(CONF_MODE): vol.In([MODE_DEV]),
|
||||||
vol.Required(CONF_API_SERVER): str,
|
vol.Required(CONF_API_SERVER): str,
|
||||||
vol.Optional(CONF_DISCOVERY_SERVICE_ACTIONS): {str: cv.url},
|
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
_BASE_CONFIG_SCHEMA.extend(
|
_BASE_CONFIG_SCHEMA.extend(
|
||||||
|
|||||||
@@ -71,11 +71,8 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
|
|||||||
services = await account_link.async_fetch_available_services(
|
services = await account_link.async_fetch_available_services(
|
||||||
hass.data[DATA_CLOUD]
|
hass.data[DATA_CLOUD]
|
||||||
)
|
)
|
||||||
except (aiohttp.ClientError, TimeoutError) as err:
|
except (aiohttp.ClientError, TimeoutError):
|
||||||
raise config_entry_oauth2_flow.ImplementationUnavailableError(
|
return []
|
||||||
"Cannot provide OAuth2 implementation for cloud services. "
|
|
||||||
"Failed to fetch from account link server."
|
|
||||||
) from err
|
|
||||||
|
|
||||||
hass.data[DATA_SERVICES] = services
|
hass.data[DATA_SERVICES] = services
|
||||||
|
|
||||||
|
|||||||
@@ -79,7 +79,6 @@ CONF_ACCOUNT_LINK_SERVER = "account_link_server"
|
|||||||
CONF_ACCOUNTS_SERVER = "accounts_server"
|
CONF_ACCOUNTS_SERVER = "accounts_server"
|
||||||
CONF_ACME_SERVER = "acme_server"
|
CONF_ACME_SERVER = "acme_server"
|
||||||
CONF_API_SERVER = "api_server"
|
CONF_API_SERVER = "api_server"
|
||||||
CONF_DISCOVERY_SERVICE_ACTIONS = "discovery_service_actions"
|
|
||||||
CONF_RELAYER_SERVER = "relayer_server"
|
CONF_RELAYER_SERVER = "relayer_server"
|
||||||
CONF_REMOTESTATE_SERVER = "remotestate_server"
|
CONF_REMOTESTATE_SERVER = "remotestate_server"
|
||||||
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"
|
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"
|
||||||
|
|||||||
@@ -14,9 +14,8 @@ from homeassistant.core import HomeAssistant
|
|||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
|
|
||||||
from .const import ObjectClassType
|
|
||||||
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
||||||
from .utils import new_device_listener
|
from .utils import DeviceType, new_device_listener
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -31,7 +30,7 @@ async def async_setup_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
|
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
|
||||||
|
|||||||
@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
|
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
|
||||||
|
STEP_RECONFIGURE = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_HOST): cv.string,
|
||||||
|
vol.Required(CONF_PORT): cv.port,
|
||||||
|
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||||
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
) -> ConfigFlowResult:
|
) -> ConfigFlowResult:
|
||||||
"""Handle reconfiguration of the device."""
|
"""Handle reconfiguration of the device."""
|
||||||
reconfigure_entry = self._get_reconfigure_entry()
|
reconfigure_entry = self._get_reconfigure_entry()
|
||||||
|
if not user_input:
|
||||||
|
return self.async_show_form(
|
||||||
|
step_id="reconfigure", data_schema=STEP_RECONFIGURE
|
||||||
|
)
|
||||||
|
|
||||||
|
updated_host = user_input[CONF_HOST]
|
||||||
|
|
||||||
|
self._async_abort_entries_match({CONF_HOST: updated_host})
|
||||||
|
|
||||||
errors: dict[str, str] = {}
|
errors: dict[str, str] = {}
|
||||||
|
|
||||||
if user_input is not None:
|
try:
|
||||||
updated_host = user_input[CONF_HOST]
|
await validate_input(self.hass, user_input)
|
||||||
|
except CannotConnect:
|
||||||
self._async_abort_entries_match({CONF_HOST: updated_host})
|
errors["base"] = "cannot_connect"
|
||||||
|
except InvalidAuth:
|
||||||
try:
|
errors["base"] = "invalid_auth"
|
||||||
data_to_validate = {
|
except InvalidPin:
|
||||||
CONF_HOST: updated_host,
|
errors["base"] = "invalid_pin"
|
||||||
CONF_PORT: user_input[CONF_PORT],
|
except Exception: # noqa: BLE001
|
||||||
CONF_PIN: user_input[CONF_PIN],
|
_LOGGER.exception("Unexpected exception")
|
||||||
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
|
errors["base"] = "unknown"
|
||||||
}
|
else:
|
||||||
await validate_input(self.hass, data_to_validate)
|
return self.async_update_reload_and_abort(
|
||||||
except CannotConnect:
|
reconfigure_entry, data_updates={CONF_HOST: updated_host}
|
||||||
errors["base"] = "cannot_connect"
|
)
|
||||||
except InvalidAuth:
|
|
||||||
errors["base"] = "invalid_auth"
|
|
||||||
except InvalidPin:
|
|
||||||
errors["base"] = "invalid_pin"
|
|
||||||
except Exception: # noqa: BLE001
|
|
||||||
_LOGGER.exception("Unexpected exception")
|
|
||||||
errors["base"] = "unknown"
|
|
||||||
else:
|
|
||||||
data_updates = {
|
|
||||||
CONF_HOST: updated_host,
|
|
||||||
CONF_PORT: user_input[CONF_PORT],
|
|
||||||
CONF_PIN: user_input[CONF_PIN],
|
|
||||||
}
|
|
||||||
return self.async_update_reload_and_abort(
|
|
||||||
reconfigure_entry, data_updates=data_updates
|
|
||||||
)
|
|
||||||
|
|
||||||
schema = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Required(
|
|
||||||
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
|
|
||||||
): cv.string,
|
|
||||||
vol.Required(
|
|
||||||
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
|
|
||||||
): cv.port,
|
|
||||||
vol.Optional(CONF_PIN): cv.string,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="reconfigure",
|
step_id="reconfigure",
|
||||||
data_schema=schema,
|
data_schema=STEP_RECONFIGURE,
|
||||||
errors=errors,
|
errors=errors,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -2,20 +2,10 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from aiocomelit.api import (
|
|
||||||
ComelitSerialBridgeObject,
|
|
||||||
ComelitVedoAreaObject,
|
|
||||||
ComelitVedoZoneObject,
|
|
||||||
)
|
|
||||||
from aiocomelit.const import BRIDGE, VEDO
|
from aiocomelit.const import BRIDGE, VEDO
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__package__)
|
_LOGGER = logging.getLogger(__package__)
|
||||||
|
|
||||||
ObjectClassType = (
|
|
||||||
ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
DOMAIN = "comelit"
|
DOMAIN = "comelit"
|
||||||
DEFAULT_PORT = 80
|
DEFAULT_PORT = 80
|
||||||
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
|
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ from aiocomelit.api import (
|
|||||||
ComeliteSerialBridgeApi,
|
ComeliteSerialBridgeApi,
|
||||||
ComelitSerialBridgeObject,
|
ComelitSerialBridgeObject,
|
||||||
ComelitVedoApi,
|
ComelitVedoApi,
|
||||||
|
ComelitVedoAreaObject,
|
||||||
|
ComelitVedoZoneObject,
|
||||||
)
|
)
|
||||||
from aiocomelit.const import (
|
from aiocomelit.const import (
|
||||||
BRIDGE,
|
BRIDGE,
|
||||||
@@ -30,7 +32,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
|
|||||||
from homeassistant.helpers import device_registry as dr
|
from homeassistant.helpers import device_registry as dr
|
||||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||||
|
|
||||||
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL, ObjectClassType
|
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL
|
||||||
|
|
||||||
type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
|
type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
|
||||||
|
|
||||||
@@ -75,7 +77,9 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
|||||||
|
|
||||||
def platform_device_info(
|
def platform_device_info(
|
||||||
self,
|
self,
|
||||||
object_class: ObjectClassType,
|
object_class: ComelitVedoZoneObject
|
||||||
|
| ComelitVedoAreaObject
|
||||||
|
| ComelitSerialBridgeObject,
|
||||||
object_type: str,
|
object_type: str,
|
||||||
) -> dr.DeviceInfo:
|
) -> dr.DeviceInfo:
|
||||||
"""Set platform device info."""
|
"""Set platform device info."""
|
||||||
|
|||||||
@@ -12,10 +12,9 @@ from homeassistant.core import HomeAssistant
|
|||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
from homeassistant.helpers.restore_state import RestoreEntity
|
from homeassistant.helpers.restore_state import RestoreEntity
|
||||||
|
|
||||||
from .const import ObjectClassType
|
|
||||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||||
from .entity import ComelitBridgeBaseEntity
|
from .entity import ComelitBridgeBaseEntity
|
||||||
from .utils import bridge_api_call, new_device_listener
|
from .utils import DeviceType, bridge_api_call, new_device_listener
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -30,7 +29,7 @@ async def async_setup_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
|
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
|
||||||
|
|||||||
@@ -10,10 +10,9 @@ from homeassistant.components.light import ColorMode, LightEntity
|
|||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
|
|
||||||
from .const import ObjectClassType
|
|
||||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||||
from .entity import ComelitBridgeBaseEntity
|
from .entity import ComelitBridgeBaseEntity
|
||||||
from .utils import bridge_api_call, new_device_listener
|
from .utils import DeviceType, bridge_api_call, new_device_listener
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -28,7 +27,7 @@ async def async_setup_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitLightEntity(coordinator, device, config_entry.entry_id)
|
ComelitLightEntity(coordinator, device, config_entry.entry_id)
|
||||||
|
|||||||
@@ -18,10 +18,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
|||||||
from homeassistant.helpers.typing import StateType
|
from homeassistant.helpers.typing import StateType
|
||||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
|
|
||||||
from .const import ObjectClassType
|
|
||||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||||
from .entity import ComelitBridgeBaseEntity
|
from .entity import ComelitBridgeBaseEntity
|
||||||
from .utils import new_device_listener
|
from .utils import DeviceType, new_device_listener
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -67,7 +66,7 @@ async def async_setup_bridge_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitBridgeSensorEntity(
|
ComelitBridgeSensorEntity(
|
||||||
@@ -94,7 +93,7 @@ async def async_setup_vedo_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitVedoSensorEntity(
|
ComelitVedoSensorEntity(
|
||||||
|
|||||||
@@ -11,10 +11,9 @@ from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
|
|||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
|
|
||||||
from .const import ObjectClassType
|
|
||||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||||
from .entity import ComelitBridgeBaseEntity
|
from .entity import ComelitBridgeBaseEntity
|
||||||
from .utils import bridge_api_call, new_device_listener
|
from .utils import DeviceType, bridge_api_call, new_device_listener
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -29,7 +28,7 @@ async def async_setup_entry(
|
|||||||
|
|
||||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||||
|
|
||||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||||
"""Add entities for new monitors."""
|
"""Add entities for new monitors."""
|
||||||
entities = [
|
entities = [
|
||||||
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
|
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
|
||||||
|
|||||||
@@ -2,9 +2,13 @@
|
|||||||
|
|
||||||
from collections.abc import Awaitable, Callable, Coroutine
|
from collections.abc import Awaitable, Callable, Coroutine
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from typing import TYPE_CHECKING, Any, Concatenate
|
from typing import Any, Concatenate
|
||||||
|
|
||||||
from aiocomelit.api import ComelitSerialBridgeObject
|
from aiocomelit.api import (
|
||||||
|
ComelitSerialBridgeObject,
|
||||||
|
ComelitVedoAreaObject,
|
||||||
|
ComelitVedoZoneObject,
|
||||||
|
)
|
||||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||||
from aiohttp import ClientSession, CookieJar
|
from aiohttp import ClientSession, CookieJar
|
||||||
|
|
||||||
@@ -18,10 +22,12 @@ from homeassistant.helpers import (
|
|||||||
entity_registry as er,
|
entity_registry as er,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .const import _LOGGER, DOMAIN, ObjectClassType
|
from .const import _LOGGER, DOMAIN
|
||||||
from .coordinator import ComelitBaseCoordinator
|
from .coordinator import ComelitBaseCoordinator
|
||||||
from .entity import ComelitBridgeBaseEntity
|
from .entity import ComelitBridgeBaseEntity
|
||||||
|
|
||||||
|
DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
|
||||||
|
|
||||||
|
|
||||||
async def async_client_session(hass: HomeAssistant) -> ClientSession:
|
async def async_client_session(hass: HomeAssistant) -> ClientSession:
|
||||||
"""Return a new aiohttp session."""
|
"""Return a new aiohttp session."""
|
||||||
@@ -120,7 +126,11 @@ def new_device_listener(
|
|||||||
coordinator: ComelitBaseCoordinator,
|
coordinator: ComelitBaseCoordinator,
|
||||||
new_devices_callback: Callable[
|
new_devices_callback: Callable[
|
||||||
[
|
[
|
||||||
list[ObjectClassType],
|
list[
|
||||||
|
ComelitSerialBridgeObject
|
||||||
|
| ComelitVedoAreaObject
|
||||||
|
| ComelitVedoZoneObject
|
||||||
|
],
|
||||||
str,
|
str,
|
||||||
],
|
],
|
||||||
None,
|
None,
|
||||||
@@ -132,10 +142,10 @@ def new_device_listener(
|
|||||||
|
|
||||||
def _check_devices() -> None:
|
def _check_devices() -> None:
|
||||||
"""Check for new devices and call callback with any new monitors."""
|
"""Check for new devices and call callback with any new monitors."""
|
||||||
if TYPE_CHECKING:
|
if not coordinator.data:
|
||||||
assert coordinator.data
|
return
|
||||||
|
|
||||||
new_devices: list[ObjectClassType] = []
|
new_devices: list[DeviceType] = []
|
||||||
for _id in coordinator.data[data_type]:
|
for _id in coordinator.data[data_type]:
|
||||||
if _id not in (id_list := known_devices.get(data_type, [])):
|
if _id not in (id_list := known_devices.get(data_type, [])):
|
||||||
known_devices.update({data_type: [*id_list, _id]})
|
known_devices.update({data_type: [*id_list, _id]})
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
|||||||
|
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
import logging
|
import logging
|
||||||
from typing import Any, Literal
|
from typing import Literal
|
||||||
|
|
||||||
from hassil.recognize import RecognizeResult
|
from hassil.recognize import RecognizeResult
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
@@ -21,7 +21,6 @@ from homeassistant.core import (
|
|||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
from homeassistant.helpers import config_validation as cv, intent
|
from homeassistant.helpers import config_validation as cv, intent
|
||||||
from homeassistant.helpers.entity_component import EntityComponent
|
from homeassistant.helpers.entity_component import EntityComponent
|
||||||
from homeassistant.helpers.reload import async_integration_yaml_config
|
|
||||||
from homeassistant.helpers.typing import ConfigType
|
from homeassistant.helpers.typing import ConfigType
|
||||||
from homeassistant.loader import bind_hass
|
from homeassistant.loader import bind_hass
|
||||||
|
|
||||||
@@ -53,8 +52,6 @@ from .const import (
|
|||||||
DATA_COMPONENT,
|
DATA_COMPONENT,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
HOME_ASSISTANT_AGENT,
|
HOME_ASSISTANT_AGENT,
|
||||||
METADATA_CUSTOM_FILE,
|
|
||||||
METADATA_CUSTOM_SENTENCE,
|
|
||||||
SERVICE_PROCESS,
|
SERVICE_PROCESS,
|
||||||
SERVICE_RELOAD,
|
SERVICE_RELOAD,
|
||||||
ConversationEntityFeature,
|
ConversationEntityFeature,
|
||||||
@@ -269,13 +266,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
|
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
|
||||||
hass.data[DATA_COMPONENT] = entity_component
|
hass.data[DATA_COMPONENT] = entity_component
|
||||||
|
|
||||||
manager = get_agent_manager(hass)
|
agent_config = config.get(DOMAIN, {})
|
||||||
|
await async_setup_default_agent(
|
||||||
hass_config_path = hass.config.path()
|
hass, entity_component, config_intents=agent_config.get("intents", {})
|
||||||
config_intents = _get_config_intents(config, hass_config_path)
|
)
|
||||||
manager.update_config_intents(config_intents)
|
|
||||||
|
|
||||||
await async_setup_default_agent(hass, entity_component)
|
|
||||||
|
|
||||||
async def handle_process(service: ServiceCall) -> ServiceResponse:
|
async def handle_process(service: ServiceCall) -> ServiceResponse:
|
||||||
"""Parse text into commands."""
|
"""Parse text into commands."""
|
||||||
@@ -300,16 +294,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
|
|
||||||
async def handle_reload(service: ServiceCall) -> None:
|
async def handle_reload(service: ServiceCall) -> None:
|
||||||
"""Reload intents."""
|
"""Reload intents."""
|
||||||
language = service.data.get(ATTR_LANGUAGE)
|
agent = get_agent_manager(hass).default_agent
|
||||||
if language is None:
|
|
||||||
conf = await async_integration_yaml_config(hass, DOMAIN)
|
|
||||||
if conf is not None:
|
|
||||||
config_intents = _get_config_intents(conf, hass_config_path)
|
|
||||||
manager.update_config_intents(config_intents)
|
|
||||||
|
|
||||||
agent = manager.default_agent
|
|
||||||
if agent is not None:
|
if agent is not None:
|
||||||
await agent.async_reload(language=language)
|
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
|
||||||
|
|
||||||
hass.services.async_register(
|
hass.services.async_register(
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
@@ -326,27 +313,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
|
|
||||||
"""Return config intents."""
|
|
||||||
intents = config.get(DOMAIN, {}).get("intents", {})
|
|
||||||
return {
|
|
||||||
"intents": {
|
|
||||||
intent_name: {
|
|
||||||
"data": [
|
|
||||||
{
|
|
||||||
"sentences": sentences,
|
|
||||||
"metadata": {
|
|
||||||
METADATA_CUSTOM_SENTENCE: True,
|
|
||||||
METADATA_CUSTOM_FILE: hass_config_path,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
for intent_name, sentences in intents.items()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||||
"""Set up a config entry."""
|
"""Set up a config entry."""
|
||||||
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
|
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
|
||||||
|
|||||||
@@ -147,7 +147,6 @@ class AgentManager:
|
|||||||
self.hass = hass
|
self.hass = hass
|
||||||
self._agents: dict[str, AbstractConversationAgent] = {}
|
self._agents: dict[str, AbstractConversationAgent] = {}
|
||||||
self.default_agent: DefaultAgent | None = None
|
self.default_agent: DefaultAgent | None = None
|
||||||
self.config_intents: dict[str, Any] = {}
|
|
||||||
self.triggers_details: list[TriggerDetails] = []
|
self.triggers_details: list[TriggerDetails] = []
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
@@ -200,16 +199,9 @@ class AgentManager:
|
|||||||
|
|
||||||
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
|
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
|
||||||
"""Set up the default agent."""
|
"""Set up the default agent."""
|
||||||
agent.update_config_intents(self.config_intents)
|
|
||||||
agent.update_triggers(self.triggers_details)
|
agent.update_triggers(self.triggers_details)
|
||||||
self.default_agent = agent
|
self.default_agent = agent
|
||||||
|
|
||||||
def update_config_intents(self, intents: dict[str, Any]) -> None:
|
|
||||||
"""Update config intents."""
|
|
||||||
self.config_intents = intents
|
|
||||||
if self.default_agent is not None:
|
|
||||||
self.default_agent.update_config_intents(intents)
|
|
||||||
|
|
||||||
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
|
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
|
||||||
"""Register a trigger."""
|
"""Register a trigger."""
|
||||||
self.triggers_details.append(trigger_details)
|
self.triggers_details.append(trigger_details)
|
||||||
|
|||||||
@@ -30,7 +30,3 @@ class ConversationEntityFeature(IntFlag):
     """Supported features of the conversation entity."""
 
     CONTROL = 1
-
-
-METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
-METADATA_CUSTOM_FILE = "hass_custom_file"
|
|||||||
@@ -77,12 +77,7 @@ from homeassistant.util.json import JsonObjectType, json_loads_object
|
|||||||
|
|
||||||
from .agent_manager import get_agent_manager
|
from .agent_manager import get_agent_manager
|
||||||
from .chat_log import AssistantContent, ChatLog
|
from .chat_log import AssistantContent, ChatLog
|
||||||
from .const import (
|
from .const import DOMAIN, ConversationEntityFeature
|
||||||
DOMAIN,
|
|
||||||
METADATA_CUSTOM_FILE,
|
|
||||||
METADATA_CUSTOM_SENTENCE,
|
|
||||||
ConversationEntityFeature,
|
|
||||||
)
|
|
||||||
from .entity import ConversationEntity
|
from .entity import ConversationEntity
|
||||||
from .models import ConversationInput, ConversationResult
|
from .models import ConversationInput, ConversationResult
|
||||||
from .trace import ConversationTraceEventType, async_conversation_trace_append
|
from .trace import ConversationTraceEventType, async_conversation_trace_append
|
||||||
@@ -96,6 +91,8 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
|
|||||||
|
|
||||||
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
|
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
|
||||||
|
|
||||||
|
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
|
||||||
|
METADATA_CUSTOM_FILE = "hass_custom_file"
|
||||||
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
|
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
|
||||||
|
|
||||||
ERROR_SENTINEL = object()
|
ERROR_SENTINEL = object()
|
||||||
@@ -205,9 +202,10 @@ class IntentCache:
|
|||||||
async def async_setup_default_agent(
|
async def async_setup_default_agent(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
entity_component: EntityComponent[ConversationEntity],
|
entity_component: EntityComponent[ConversationEntity],
|
||||||
|
config_intents: dict[str, Any],
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Set up entity registry listener for the default agent."""
|
"""Set up entity registry listener for the default agent."""
|
||||||
agent = DefaultAgent(hass)
|
agent = DefaultAgent(hass, config_intents)
|
||||||
await entity_component.async_add_entities([agent])
|
await entity_component.async_add_entities([agent])
|
||||||
await get_agent_manager(hass).async_setup_default_agent(agent)
|
await get_agent_manager(hass).async_setup_default_agent(agent)
|
||||||
|
|
||||||
@@ -232,14 +230,14 @@ class DefaultAgent(ConversationEntity):
|
|||||||
_attr_name = "Home Assistant"
|
_attr_name = "Home Assistant"
|
||||||
_attr_supported_features = ConversationEntityFeature.CONTROL
|
_attr_supported_features = ConversationEntityFeature.CONTROL
|
||||||
|
|
||||||
def __init__(self, hass: HomeAssistant) -> None:
|
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
|
||||||
"""Initialize the default agent."""
|
"""Initialize the default agent."""
|
||||||
self.hass = hass
|
self.hass = hass
|
||||||
self._lang_intents: dict[str, LanguageIntents | object] = {}
|
self._lang_intents: dict[str, LanguageIntents | object] = {}
|
||||||
self._load_intents_lock = asyncio.Lock()
|
self._load_intents_lock = asyncio.Lock()
|
||||||
|
|
||||||
# Intents from common conversation config
|
# intent -> [sentences]
|
||||||
self._config_intents: dict[str, Any] = {}
|
self._config_intents: dict[str, Any] = config_intents
|
||||||
|
|
||||||
# Sentences that will trigger a callback (skipping intent recognition)
|
# Sentences that will trigger a callback (skipping intent recognition)
|
||||||
self._triggers_details: list[TriggerDetails] = []
|
self._triggers_details: list[TriggerDetails] = []
|
||||||
@@ -1037,14 +1035,6 @@ class DefaultAgent(ConversationEntity):
|
|||||||
# Intents have changed, so we must clear the cache
|
# Intents have changed, so we must clear the cache
|
||||||
self._intent_cache.clear()
|
self._intent_cache.clear()
|
||||||
|
|
||||||
@callback
|
|
||||||
def update_config_intents(self, intents: dict[str, Any]) -> None:
|
|
||||||
"""Update config intents."""
|
|
||||||
self._config_intents = intents
|
|
||||||
|
|
||||||
# Intents have changed, so we must clear the cache
|
|
||||||
self._intent_cache.clear()
|
|
||||||
|
|
||||||
async def async_prepare(self, language: str | None = None) -> None:
|
async def async_prepare(self, language: str | None = None) -> None:
|
||||||
"""Load intents for a language."""
|
"""Load intents for a language."""
|
||||||
if language is None:
|
if language is None:
|
||||||
@@ -1169,10 +1159,33 @@ class DefaultAgent(ConversationEntity):
|
|||||||
custom_sentences_path,
|
custom_sentences_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
merge_dict(
|
# Load sentences from HA config for default language only
|
||||||
intents_dict,
|
if self._config_intents and (
|
||||||
self._config_intents,
|
self.hass.config.language in (language, language_variant)
|
||||||
)
|
):
|
||||||
|
hass_config_path = self.hass.config.path()
|
||||||
|
merge_dict(
|
||||||
|
intents_dict,
|
||||||
|
{
|
||||||
|
"intents": {
|
||||||
|
intent_name: {
|
||||||
|
"data": [
|
||||||
|
{
|
||||||
|
"sentences": sentences,
|
||||||
|
"metadata": {
|
||||||
|
METADATA_CUSTOM_SENTENCE: True,
|
||||||
|
METADATA_CUSTOM_FILE: hass_config_path,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
for intent_name, sentences in self._config_intents.items()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Loaded intents from configuration.yaml",
|
||||||
|
)
|
||||||
|
|
||||||
if not intents_dict:
|
if not intents_dict:
|
||||||
return None
|
return None
|
||||||
|
|||||||
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
+  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.10.28"]
 }
|
|||||||
@@ -6,5 +6,3 @@ DEFAULT_PORT = 10102
 
 CONF_SUPPORTED_MODES = "supported_modes"
 CONF_SWING_SUPPORT = "swing_support"
-MAX_RETRIES = 3
-BACKOFF_BASE_DELAY = 2
|
|||||||
@@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from pycoolmasternet_async import CoolMasterNet
|
from pycoolmasternet_async import CoolMasterNet
|
||||||
@@ -13,7 +12,7 @@ from homeassistant.config_entries import ConfigEntry
|
|||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||||
|
|
||||||
from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
|
from .const import DOMAIN
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -47,34 +46,7 @@ class CoolmasterDataUpdateCoordinator(
|
|||||||
|
|
||||||
async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
|
async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
|
||||||
"""Fetch data from Coolmaster."""
|
"""Fetch data from Coolmaster."""
|
||||||
retries_left = MAX_RETRIES
|
try:
|
||||||
status: dict[str, CoolMasterNetUnit] = {}
|
return await self._coolmaster.status()
|
||||||
while retries_left > 0 and not status:
|
except OSError as error:
|
||||||
retries_left -= 1
|
raise UpdateFailed from error
|
||||||
try:
|
|
||||||
status = await self._coolmaster.status()
|
|
||||||
except OSError as error:
|
|
||||||
if retries_left == 0:
|
|
||||||
raise UpdateFailed(
|
|
||||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
|
|
||||||
) from error
|
|
||||||
_LOGGER.debug(
|
|
||||||
"Error communicating with coolmaster (%d retries left): %s",
|
|
||||||
retries_left,
|
|
||||||
str(error),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
if status:
|
|
||||||
return status
|
|
||||||
|
|
||||||
_LOGGER.debug(
|
|
||||||
"Error communicating with coolmaster: empty status received (%d retries left)",
|
|
||||||
retries_left,
|
|
||||||
)
|
|
||||||
|
|
||||||
backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
|
|
||||||
await asyncio.sleep(backoff)
|
|
||||||
|
|
||||||
raise UpdateFailed(
|
|
||||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
|||||||
import asyncio
|
import asyncio
|
||||||
from collections.abc import Mapping
|
from collections.abc import Mapping
|
||||||
from functools import partial
|
from functools import partial
|
||||||
import logging
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
|
from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
|
||||||
@@ -23,8 +22,6 @@ from .const import DOMAIN, PLATFORMS
|
|||||||
|
|
||||||
type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]
|
type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(
|
async def async_setup_entry(
|
||||||
hass: HomeAssistant, entry: DevoloHomeControlConfigEntry
|
hass: HomeAssistant, entry: DevoloHomeControlConfigEntry
|
||||||
@@ -47,29 +44,26 @@ async def async_setup_entry(
|
|||||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
|
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
|
||||||
)
|
)
|
||||||
|
|
||||||
zeroconf_instance = await zeroconf.async_get_instance(hass)
|
try:
|
||||||
entry.runtime_data = []
|
zeroconf_instance = await zeroconf.async_get_instance(hass)
|
||||||
offline_gateways = 0
|
entry.runtime_data = []
|
||||||
for gateway_id in gateway_ids:
|
for gateway_id in gateway_ids:
|
||||||
try:
|
|
||||||
entry.runtime_data.append(
|
entry.runtime_data.append(
|
||||||
await hass.async_add_executor_job(
|
await hass.async_add_executor_job(
|
||||||
partial(
|
partial(
|
||||||
HomeControl,
|
HomeControl,
|
||||||
gateway_id=gateway_id,
|
gateway_id=str(gateway_id),
|
||||||
mydevolo_instance=mydevolo,
|
mydevolo_instance=mydevolo,
|
||||||
zeroconf_instance=zeroconf_instance,
|
zeroconf_instance=zeroconf_instance,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
except GatewayOfflineError:
|
except GatewayOfflineError as err:
|
||||||
offline_gateways += 1
|
|
||||||
_LOGGER.info("Central unit %s cannot be reached locally", gateway_id)
|
|
||||||
if len(gateway_ids) == offline_gateways:
|
|
||||||
raise ConfigEntryNotReady(
|
raise ConfigEntryNotReady(
|
||||||
translation_domain=DOMAIN,
|
translation_domain=DOMAIN,
|
||||||
translation_key="connection_failed",
|
translation_key="connection_failed",
|
||||||
)
|
translation_placeholders={"gateway_id": gateway_id},
|
||||||
|
) from err
|
||||||
|
|
||||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
   "integration_type": "hub",
   "iot_class": "local_push",
-  "loggers": ["HomeControl", "Mydevolo", "MprmRest", "MprmWebsocket", "Mprm"],
+  "loggers": ["devolo_home_control_api"],
   "requirements": ["devolo-home-control-api==0.19.0"],
   "zeroconf": ["_dvl-deviceapi._tcp.local."]
 }
|
|||||||
@@ -58,7 +58,7 @@
   },
   "exceptions": {
     "connection_failed": {
-      "message": "Failed to connect to any devolo Home Control central unit."
+      "message": "Failed to connect to devolo Home Control central unit {gateway_id}."
     },
     "invalid_auth": {
       "message": "Authentication failed. Please re-authenticate with your mydevolo account."
|
|||||||
@@ -9,7 +9,7 @@
   },
   "iot_class": "cloud_polling",
   "loggers": ["pyecobee"],
-  "requirements": ["python-ecobee-api==0.3.2"],
+  "requirements": ["python-ecobee-api==0.2.20"],
   "single_config_entry": true,
   "zeroconf": [
     {
|
|||||||
@@ -81,9 +81,6 @@
|
|||||||
"active_map": {
|
"active_map": {
|
||||||
"default": "mdi:floor-plan"
|
"default": "mdi:floor-plan"
|
||||||
},
|
},
|
||||||
"auto_empty": {
|
|
||||||
"default": "mdi:delete-empty"
|
|
||||||
},
|
|
||||||
"water_amount": {
|
"water_amount": {
|
||||||
"default": "mdi:water"
|
"default": "mdi:water"
|
||||||
},
|
},
|
||||||
@@ -92,6 +89,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"sensor": {
|
"sensor": {
|
||||||
|
"auto_empty": {
|
||||||
|
"default": "mdi:delete-empty"
|
||||||
|
},
|
||||||
"error": {
|
"error": {
|
||||||
"default": "mdi:alert-circle"
|
"default": "mdi:alert-circle"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
 }
|
@@ -5,9 +5,8 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any
 
 from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
-from deebot_client.command import CommandWithMessageHandling
 from deebot_client.device import Device
-from deebot_client.events import WorkModeEvent, auto_empty
+from deebot_client.events import WorkModeEvent
 from deebot_client.events.base import Event
 from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
 from deebot_client.events.water_info import WaterAmountEvent
@@ -35,9 +34,6 @@ class EcovacsSelectEntityDescription[EventT: Event](
 
     current_option_fn: Callable[[EventT], str | None]
     options_fn: Callable[[CapabilitySetTypes], list[str]]
-    set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
-        lambda cap, option: cap.set(option)
-    )
 
 
 ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
@@ -62,14 +58,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.CONFIG,
     ),
-    EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
-        capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
-        current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
-        options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
-        set_option_fn=lambda cap, option: cap.set(None, option),
-        key="auto_empty",
-        translation_key="auto_empty",
-    ),
 )
 
 
@@ -118,17 +106,14 @@ class EcovacsSelectEntity[EventT: Event](
         await super().async_added_to_hass()
 
         async def on_event(event: EventT) -> None:
-            if (option := self.entity_description.current_option_fn(event)) is not None:
-                self._attr_current_option = option
-                self.async_write_ha_state()
+            self._attr_current_option = self.entity_description.current_option_fn(event)
+            self.async_write_ha_state()
 
         self._subscribe(self._capability.event, on_event)
 
     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
-        await self._device.execute_command(
-            self.entity_description.set_option_fn(self._capability, option)
-        )
+        await self._device.execute_command(self._capability.set(option))
 
 
 class EcovacsActiveMapSelectEntity(
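
The select changes above revolve around one idea: the entity description carries a set_option_fn callable, so a single generic entity class can build a different command per description (the auto-empty select needs cap.set(None, option) instead of cap.set(option)). A self-contained sketch of that pattern with stand-in classes (none of them are the real deebot-client types):

from collections.abc import Callable
from dataclasses import dataclass


class FakeCapability:
    """Stand-in for a deebot-client capability; only used for this sketch."""

    def set(self, *args):
        # The real capability would return a command object; a tuple is enough here.
        return ("set", args)


@dataclass(frozen=True, kw_only=True)
class SelectDescription:
    """Entity description carrying the command builder, like set_option_fn above."""

    key: str
    # Default builder: cap.set(option); individual descriptions may override it.
    set_option_fn: Callable[[FakeCapability, str], tuple] = (
        lambda cap, option: cap.set(option)
    )


water = SelectDescription(key="water_amount")
auto_empty = SelectDescription(
    key="auto_empty",
    set_option_fn=lambda cap, option: cap.set(None, option),
)

cap = FakeCapability()
print(water.set_option_fn(cap, "high"))        # ('set', ('high',))
print(auto_empty.set_option_fn(cap, "smart"))  # ('set', (None, 'smart'))

A generic entity then only needs await device.execute_command(description.set_option_fn(capability, option)), which is the shape of the left-hand async_select_option above.
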
@@ -17,6 +17,7 @@ from deebot_client.events import (
     NetworkInfoEvent,
     StatsEvent,
     TotalStatsEvent,
+    auto_empty,
     station,
 )
 from sucks import VacBot
@@ -158,6 +159,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.ENUM,
         options=get_options(station.State),
     ),
+    EcovacsSensorEntityDescription[auto_empty.AutoEmptyEvent](
+        capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
+        value_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
+        key="auto_empty",
+        translation_key="auto_empty",
+        device_class=SensorDeviceClass.ENUM,
+        options=get_options(auto_empty.Frequency),
+    ),
 )
 
 
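
Both the removed select and the added sensor map deebot-client enum members onto translation keys through get_name_key/get_options helpers. Their implementation is not shown in this diff; a plausible sketch of what such helpers do, with a stand-in enum:

from enum import Enum


def get_name_key(value: Enum) -> str:
    """Return the lower-cased member name, e.g. Frequency.SMART -> 'smart'."""
    return value.name.lower()


def get_options(enum: type[Enum]) -> list[str]:
    """Return all member names as lower-cased option/translation keys."""
    return [get_name_key(member) for member in enum]


class Frequency(Enum):  # stand-in for deebot_client.events.auto_empty.Frequency
    AUTO = 1
    SMART = 2


print(get_options(Frequency))  # ['auto', 'smart']
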
@@ -129,16 +129,6 @@
       "active_map": {
         "name": "Active map"
       },
-      "auto_empty": {
-        "name": "Auto-empty frequency",
-        "state": {
-          "auto": "Auto",
-          "min_10": "10 minutes",
-          "min_15": "15 minutes",
-          "min_25": "25 minutes",
-          "smart": "Smart"
-        }
-      },
       "water_amount": {
         "name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
         "state": {
@@ -159,6 +149,13 @@
       }
     },
     "sensor": {
+      "auto_empty": {
+        "name": "Auto-empty frequency",
+        "state": {
+          "auto": "Auto",
+          "smart": "Smart"
+        }
+      },
       "error": {
         "name": "Error",
         "state_attributes": {
@@ -151,12 +151,14 @@ ECOWITT_SENSORS_MAPPING: Final = {
         key="RAIN_COUNT_MM",
         native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
         device_class=SensorDeviceClass.PRECIPITATION,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=1,
     ),
     EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
         key="RAIN_COUNT_INCHES",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
     ),
     EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
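
For context on the added state_class: a cumulative rain counter only gets long-term statistics once a state class is set, and SensorStateClass.TOTAL (unlike TOTAL_INCREASING) does not treat a decreasing value as a meter reset. A standalone sketch of such a description, with field values chosen to mirror the hunk rather than taken from elsewhere in the file:

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfPrecipitationDepth

RAIN_COUNT_MM = SensorEntityDescription(
    key="RAIN_COUNT_MM",
    native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
    device_class=SensorDeviceClass.PRECIPITATION,
    # TOTAL records the running total in statistics without interpreting
    # a drop in the reported value as a meter reset.
    state_class=SensorStateClass.TOTAL,
    suggested_display_precision=1,
)
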
@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
                 return await self.async_step_discovered_connection()
             return await self.async_step_manual_connection()
 
-        current_unique_ids = self._async_current_ids(include_ignore=False)
+        current_unique_ids = self._async_current_ids()
         current_hosts = {
             hostname_from_url(entry.data[CONF_HOST])
             for entry in self._async_current_entries(include_ignore=False)
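
The only change here is whether ignored config entries count as "already configured": with include_ignore=False a device the user has ignored is still offered again by discovery, while the default also treats the ignored entry's unique ID as taken. A small sketch of the dedupe step, assuming a hypothetical discovered-device list with a mac attribute (not the real elkm1 discovery objects):

from homeassistant.config_entries import ConfigFlow


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Illustration only; 'example' is a placeholder domain."""

    def _unconfigured(self, discovered: list) -> list:
        """Return only the devices whose unique ID is not configured yet."""
        # include_ignore=False: entries the user ignored do not hide a device.
        current_unique_ids = self._async_current_ids(include_ignore=False)
        return [device for device in discovered if device.mac not in current_unique_ids]
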
@@ -15,5 +15,5 @@
   "documentation": "https://www.home-assistant.io/integrations/elkm1",
   "iot_class": "local_push",
   "loggers": ["elkm1_lib"],
-  "requirements": ["elkm1-lib==2.2.12"]
+  "requirements": ["elkm1-lib==2.2.11"]
 }
@@ -189,7 +189,9 @@ class ElkPanel(ElkSensor):
 
     def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         if self._elk.is_connected():
-            self._attr_native_value = "Paused" if self._elk.is_paused() else "Connected"
+            self._attr_native_value = (
+                "Paused" if self._element.remote_programming_status else "Connected"
+            )
         else:
             self._attr_native_value = "Disconnected"
 
@@ -1,84 +0,0 @@
-rules:
-  # todo : add get_feed_list to the library
-  # todo : see if we can drop some extra attributes
-  # Bronze
-  action-setup:
-    status: exempt
-    comment: |
-      This integration does not provide additional actions.
-  appropriate-polling: done
-  brands: done
-  common-modules: done
-  config-flow-test-coverage:
-    status: todo
-    comment: |
-      test_reconfigure_api_error should use a mock config entry fixture
-      test_user_flow_failure should use a mock config entry fixture
-      move test_user_flow_* to the top of the file
-  config-flow: done
-  dependency-transparency: done
-  docs-actions:
-    status: exempt
-    comment: |
-      This integration does not provide additional actions.
-  docs-high-level-description: done
-  docs-installation-instructions: done
-  docs-removal-instructions: done
-  entity-event-setup:
-    status: exempt
-    comment: |
-      No events are explicitly registered by the integration.
-  entity-unique-id: done
-  has-entity-name: done
-  runtime-data: done
-  test-before-configure: done
-  test-before-setup: done
-  unique-config-entry: done
-
-  # Silver
-  action-exceptions: done
-  config-entry-unloading: done
-  docs-configuration-parameters: done
-  docs-installation-parameters: done
-  entity-unavailable: todo
-  integration-owner: done
-  log-when-unavailable: done
-  parallel-updates: todo
-  reauthentication-flow: todo
-  test-coverage:
-    status: todo
-    comment: |
-      test the entry state in test_failure
-
-  # Gold
-  devices: todo
-  diagnostics: todo
-  discovery-update-info: todo
-  discovery: todo
-  docs-data-update: done
-  docs-examples:
-    status: exempt
-    comment: |
-      This integration does not provide any automation
-  docs-known-limitations: todo
-  docs-supported-devices: todo
-  docs-supported-functions: done
-  docs-troubleshooting: done
-  docs-use-cases: todo
-  dynamic-devices: todo
-  entity-category: todo
-  entity-device-class:
-    status: todo
-    comment: change device_class=SensorDeviceClass.SIGNAL_STRENGTH to SOUND_PRESSURE
-  entity-disabled-by-default: todo
-  entity-translations: done
-  exception-translations: todo
-  icon-translations: todo
-  reconfiguration-flow: done
-  repair-issues: todo
-  stale-devices: todo
-
-  # Platinum
-  async-dependency: done
-  inject-websession: done
-  strict-typing: todo
@@ -5,7 +5,7 @@ from __future__ import annotations
 import asyncio
 from collections import Counter
 from collections.abc import Awaitable, Callable
-from typing import Literal, NotRequired, TypedDict
+from typing import Literal, TypedDict
 
 import voluptuous as vol
 
@@ -29,7 +29,7 @@ async def async_get_manager(hass: HomeAssistant) -> EnergyManager:
 class FlowFromGridSourceType(TypedDict):
     """Dictionary describing the 'from' stat for the grid source."""
 
-    # statistic_id of an energy meter (kWh)
+    # statistic_id of a an energy meter (kWh)
     stat_energy_from: str
 
     # statistic_id of costs ($) incurred from the energy meter
@@ -58,14 +58,6 @@ class FlowToGridSourceType(TypedDict):
     number_energy_price: float | None  # Price for energy ($/kWh)
 
 
-class GridPowerSourceType(TypedDict):
-    """Dictionary holding the source of grid power consumption."""
-
-    # statistic_id of a power meter (kW)
-    # negative values indicate grid return
-    stat_rate: str
-
-
 class GridSourceType(TypedDict):
     """Dictionary holding the source of grid energy consumption."""
 
@@ -73,7 +65,6 @@ class GridSourceType(TypedDict):
 
     flow_from: list[FlowFromGridSourceType]
     flow_to: list[FlowToGridSourceType]
-    power: NotRequired[list[GridPowerSourceType]]
 
     cost_adjustment_day: float
 
@@ -84,7 +75,6 @@ class SolarSourceType(TypedDict):
     type: Literal["solar"]
 
     stat_energy_from: str
-    stat_rate: NotRequired[str]
     config_entry_solar_forecast: list[str] | None
 
 
@@ -95,8 +85,6 @@ class BatterySourceType(TypedDict):
 
     stat_energy_from: str
     stat_energy_to: str
-    # positive when discharging, negative when charging
-    stat_rate: NotRequired[str]
 
 
 class GasSourceType(TypedDict):
@@ -148,15 +136,12 @@ class DeviceConsumption(TypedDict):
     # This is an ever increasing value
     stat_consumption: str
 
-    # Instantaneous rate of flow: W, L/min or m³/h
-    stat_rate: NotRequired[str]
-
     # An optional custom name for display in energy graphs
     name: str | None
 
     # An optional statistic_id identifying a device
    # that includes this device's consumption in its total
-    included_in_stat: NotRequired[str]
+    included_in_stat: str | None
 
 
 class EnergyPreferences(TypedDict):
@@ -209,12 +194,6 @@ FLOW_TO_GRID_SOURCE_SCHEMA = vol.Schema(
     }
 )
 
-GRID_POWER_SOURCE_SCHEMA = vol.Schema(
-    {
-        vol.Required("stat_rate"): str,
-    }
-)
-
 
 def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[dict]]:
     """Generate a validator that ensures a value is only used once."""
@@ -245,10 +224,6 @@ GRID_SOURCE_SCHEMA = vol.Schema(
             [FLOW_TO_GRID_SOURCE_SCHEMA],
             _generate_unique_value_validator("stat_energy_to"),
         ),
-        vol.Optional("power"): vol.All(
-            [GRID_POWER_SOURCE_SCHEMA],
-            _generate_unique_value_validator("stat_rate"),
-        ),
         vol.Required("cost_adjustment_day"): vol.Coerce(float),
     }
 )
@@ -256,7 +231,6 @@ SOLAR_SOURCE_SCHEMA = vol.Schema(
     {
         vol.Required("type"): "solar",
         vol.Required("stat_energy_from"): str,
-        vol.Optional("stat_rate"): str,
         vol.Optional("config_entry_solar_forecast"): vol.Any([str], None),
     }
 )
@@ -265,7 +239,6 @@ BATTERY_SOURCE_SCHEMA = vol.Schema(
         vol.Required("type"): "battery",
         vol.Required("stat_energy_from"): str,
         vol.Required("stat_energy_to"): str,
-        vol.Optional("stat_rate"): str,
     }
 )
 GAS_SOURCE_SCHEMA = vol.Schema(
@@ -321,7 +294,6 @@ ENERGY_SOURCE_SCHEMA = vol.All(
 DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
     {
         vol.Required("stat_consumption"): str,
-        vol.Optional("stat_rate"): str,
         vol.Optional("name"): str,
         vol.Optional("included_in_stat"): str,
     }
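
The removed grid-power pieces follow the module's general pattern: a TypedDict with NotRequired for the optional key, mirrored by a voluptuous schema with vol.Optional. A standalone sketch of that pairing (the names echo the left-hand side of the diff; it is not the module itself):

from typing import NotRequired, TypedDict

import voluptuous as vol


class GridPowerSource(TypedDict):
    """Power source of a grid (kW); negative values indicate grid return."""

    stat_rate: str


class GridSource(TypedDict):
    """Grid source whose 'power' list is optional, hence NotRequired."""

    power: NotRequired[list[GridPowerSource]]
    cost_adjustment_day: float


GRID_POWER_SCHEMA = vol.Schema({vol.Required("stat_rate"): str})

GRID_SCHEMA = vol.Schema(
    {
        vol.Optional("power"): [GRID_POWER_SCHEMA],
        vol.Required("cost_adjustment_day"): vol.Coerce(float),
    }
)

# vol.Optional mirrors NotRequired: the key may simply be absent.
print(GRID_SCHEMA({"cost_adjustment_day": 0}))
print(GRID_SCHEMA({"cost_adjustment_day": 0, "power": [{"stat_rate": "sensor.grid_power"}]}))
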
Some files were not shown because too many files have changed in this diff.