Mirror of https://github.com/home-assistant/core.git (synced 2026-01-04 06:07:56 +00:00)

Compare commits: 170 commits, 2025.7.2...enforce_en
Commits in the comparison (SHA1 only):
df446f5240, c92873bbff, 5fea4915ef, 8fa016059d, 61a29db72c, 5a3aa7874d, 12e2493c42, 659cd42739,
7fcea17e83, 30a85c40da, 57a8f1e0cc, 78aeae577d, 3f95cb37e6, 12aef4aae5, 2e12db001d, 573325be97,
7021fe7495, b7999755bd, 99f7a031d6, 8fc31283b7, 5ff698c78d, 9469c6ad1c, 35f0505c7b, a180cabea9,
4f7348b8bc, ddf56f053b, 9719d2ef2b, 2afe475234, 23c304fc75, 84645d0ca6, 2bdfc8cf5e, 603e277a5b,
38a7b21052, bf74ba990a, 70856bd92a, be6b624081, 217fbb2849, 22a14da19c, 20f5d85800, 88feb5139b,
90cbe272a0, 511b739bf6, 9961a499ee, d8c7ed473b, 2c30a5a14c, 5e3fc858d8, f03af213d4, 1e3ebd5650,
53936ab062, b52a248def, ea70229426, 741a3d5009, ee8830cc77, 7fbf25e862, e642cd45ae, 179e1c2b00,
52a99aea0c, c7b2f236be, a6e3da43ca, 4d58024d5d, c7603b39ec, c17ee0d123, 97c1e21a69, c9a6b1fd45,
05ceee568e, 08a6b38699, 4add346272, 369c8d1e0d, 25ab47a587, 617ea1925c, 8bacab4f9c, 6d28b99344,
bbd1cbf5c9, 43450d4489, f8c052e0ce, 1f3bdfc7b7, 0652bffd68, 8322611099, 134967b817, 39abae36f0,
227760f203, 969809456e, d2e8a48b2c, ea6332ee42, 91c3b43d7f, 1d82d44794, 571376badc, 32236b2f4d,
18c1953bc5, d874c28dc9, 19d89c8952, e3ba1f34ca, b630fb0520, 5129f89086, 0be0e22e76, b8500b338a,
4cab3a0465, ff711324d5, 113e7dc003, 2120ff6a0a, 8ee5c30754, a1518b96c4, bba7f5c3f0, 8a5671af76,
8a18dea8c7, 4b02f22724, 7229c2ca2c, d83eddf13b, 4a192a7b09, 58c434887e, 78c2405e61, 8cc4105984,
917f1e4c6f, 3879f6d2ef, 78060e4833, fda66c4be4, 21131d00b3, a84313de33, c73346e6b3, 55a37a2936,
e481f14335, 1ca03c8ae9, 61b43ca1fc, 1b2be083c2, 4bdf3d6f30, 43535ede8b, 9bd0762799, 1bb653b4f7,
2655edcfc8, 7a08edc3dd, b3131355b0, 06d04c001d, babecdf32c, 17cd39748b, c2f1e86a4e, 61a32466b6,
aef08091f8, 1416f0f1e0, af7b1a76bc, bf88fcd5bf, 35478e3162, 69af74a593, b4dd912bee, b5821ef499,
1a92d4530e, 7b80c1c693, e7cc03c1d9, 69f0b6244a, 01205f8a14, 68924d23ab, 40f553a007, bc46894b74,
6f4615f012, 4244d2f66f, a73dafe097, be49296547, d55ecd885e, 076248c455, 13ce27c94c, 4b9b08ece5,
79df38eff2, fb133664e4, 38669ce96c, 651b33d49b, 3b64db5f76, 0f95fe566c, 6290facffb, f0a78aadbe,
345ec97dd5, 1286b5d9d8
.github/copilot-instructions.md (vendored, 1235 changes): file diff suppressed because it is too large.
.github/workflows/ci.yaml (vendored, 2 changes)
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 3
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.7"
HA_SHORT_VERSION: "2025.8"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
.github/workflows/codeql.yml (vendored, 4 changes)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.0
uses: github/codeql-action/init@v3.29.2
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.0
uses: github/codeql-action/analyze@v3.29.2
with:
category: "/language:python"
.gitignore (vendored, 6 changes)
@@ -137,4 +137,8 @@ tmp_cache
.ropeproject
# Will be created from script/split_tests.py
pytest_buckets.txt
pytest_buckets.txt
# AI tooling
.claude
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.0
rev: v0.12.1
hooks:
- id: ruff-check
args:
CODEOWNERS (generated, 2 changes)
@@ -1553,6 +1553,8 @@ build.json @home-assistant/supervisor
/tests/components/technove/ @Moustachauve
/homeassistant/components/tedee/ @patrickhilker @zweckj
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/telegram_bot/ @hanwg
/tests/components/telegram_bot/ @hanwg
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @Petro31 @home-assistant/core
@@ -75,7 +75,6 @@ from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
backup,
category_registry,
config_validation as cv,
device_registry,
@@ -607,7 +606,7 @@ async def async_enable_logging(
)
threading.excepthook = lambda args: logging.getLogger().exception(
"Uncaught thread exception",
exc_info=( # type: ignore[arg-type] # noqa: LOG014
exc_info=( # type: ignore[arg-type]
args.exc_type,
args.exc_value,
args.exc_traceback,
@@ -880,10 +879,6 @@ async def _async_set_up_integrations(
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)
# Initialize backup
if "backup" in all_domains:
backup.async_initialize_backup(hass)
stages: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group, timeout)
@@ -1061,5 +1056,5 @@ async def _async_setup_multi_components(
_LOGGER.error(
"Error setting up integration %s - received exception",
domain,
exc_info=(type(result), result, result.__traceback__), # noqa: LOG014
exc_info=(type(result), result, result.__traceback__),
)
@@ -336,7 +336,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
keys=[AOD_WEATHER, AOD_WIND_DIRECTION],
name="Wind bearing",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
AemetSensorEntityDescription(
@@ -10,7 +10,7 @@ from aioairq.core import AirQ, identify_warming_up_sensors
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -39,7 +39,7 @@ class AirQCoordinator(DataUpdateCoordinator):
name=DOMAIN,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
session = async_get_clientsession(hass)
session = async_create_clientsession(hass)
self.airq = AirQ(
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
)
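For context on the change above: async_get_clientsession returns Home Assistant's shared aiohttp ClientSession, while async_create_clientsession creates a session owned by the integration, so AirQ now gets its own session. A minimal sketch (assumes a HomeAssistant instance named hass is in scope):

from homeassistant.helpers.aiohttp_client import (
    async_create_clientsession,
    async_get_clientsession,
)

shared_session = async_get_clientsession(hass)  # shared session; integrations must not close it
own_session = async_create_clientsession(hass)  # dedicated session for this integration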
@@ -29,5 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo
async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Unload a config entry."""
await entry.runtime_data.api.close()
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
coordinator = entry.runtime_data
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await coordinator.api.close()
return unload_ok
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "bronze",
"requirements": ["aioamazondevices==3.1.19"]
"requirements": ["aioamazondevices==3.1.22"]
}
@@ -15,6 +15,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call
PARALLEL_UPDATES = 1
@@ -70,6 +71,7 @@ class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
entity_description: AmazonNotifyEntityDescription
@alexa_api_call
async def async_send_message(
self, message: str, title: str | None = None, **kwargs: Any
) -> None:
@@ -26,7 +26,7 @@ rules:
unique-config-entry: done
# Silver
action-exceptions: todo
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
@@ -1,8 +1,7 @@
{
"common": {
"data_country": "Country code",
"data_code": "One-time password (OTP code)",
"data_description_country": "The country of your Amazon account.",
"data_description_country": "The country where your Amazon account is registered.",
"data_description_username": "The email address of your Amazon account.",
"data_description_password": "The password of your Amazon account.",
"data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported."
@@ -12,10 +11,10 @@
"step": {
"user": {
"data": {
"country": "[%key:component::alexa_devices::common::data_country%]",
"country": "[%key:common::config_flow::data::country%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"country": "[%key:component::alexa_devices::common::data_description_country%]",
@@ -71,5 +70,13 @@
"name": "Do not disturb"
}
}
},
"exceptions": {
"cannot_connect": {
"message": "Error connecting: {error}"
},
"cannot_retrieve_data": {
"message": "Error retrieving data: {error}"
}
}
}
@@ -14,6 +14,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call
PARALLEL_UPDATES = 1
@@ -60,6 +61,7 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
entity_description: AmazonSwitchEntityDescription
@alexa_api_call
async def _switch_set_state(self, state: bool) -> None:
"""Set desired switch state."""
method = getattr(self.coordinator.api, self.entity_description.method)
homeassistant/components/alexa_devices/utils.py (new file, 40 lines)
@@ -0,0 +1,40 @@
"""Utils for Alexa Devices."""

from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.exceptions import HomeAssistantError

from .const import DOMAIN
from .entity import AmazonEntity


def alexa_api_call[_T: AmazonEntity, **_P](
    func: Callable[Concatenate[_T, _P], Awaitable[None]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
    """Catch Alexa API call exceptions."""

    @wraps(func)
    async def cmd_wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
        """Wrap all command methods."""
        try:
            await func(self, *args, **kwargs)
        except CannotConnect as err:
            self.coordinator.last_update_success = False
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except CannotRetrieveData as err:
            self.coordinator.last_update_success = False
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_retrieve_data",
                translation_placeholders={"error": repr(err)},
            ) from err

    return cmd_wrapper
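The decorator above is applied to entity command methods elsewhere in this changeset (see the notify and switch hunks). A minimal, hypothetical usage sketch; the entity class and the wrapped method are illustrative, not part of the diff:

from homeassistant.components.alexa_devices.entity import AmazonEntity
from homeassistant.components.alexa_devices.utils import alexa_api_call


class ExampleAmazonEntity(AmazonEntity):
    """Hypothetical entity, only to illustrate the decorator."""

    @alexa_api_call
    async def async_do_command(self) -> None:
        # CannotConnect / CannotRetrieveData raised inside the wrapped call are
        # converted by cmd_wrapper into a translated HomeAssistantError.
        await self._send_command()  # hypothetical helper on this entity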
@@ -17,7 +17,13 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.typing import ConfigType
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
from .const import (
CONF_CHAT_MODEL,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
)
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -55,6 +61,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(async_update_options))
return True
@@ -63,6 +71,13 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_update_options(
hass: HomeAssistant, entry: AnthropicConfigEntry
) -> None:
"""Update options."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_migrate_integration(hass: HomeAssistant) -> None:
"""Migrate integration entry structure."""
@@ -117,12 +132,49 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
device.id,
remove_config_entry_id=entry.entry_id,
)
else:
device_registry.async_update_device(
device.id,
remove_config_entry_id=entry.entry_id,
remove_config_subentry_id=None,
)
if not use_existing:
await hass.config_entries.async_remove(entry.entry_id)
else:
hass.config_entries.async_update_entry(
entry,
title=DEFAULT_CONVERSATION_NAME,
options={},
version=2,
minor_version=2,
)
async def async_migrate_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
"""Migrate entry."""
LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
if entry.version > 2:
# This means the user has downgraded from a future version
return False
if entry.version == 2 and entry.minor_version == 1:
# Correct broken device migration in Home Assistant Core 2025.7.0b0-2025.7.0b1
device_registry = dr.async_get(hass)
for device in dr.async_entries_for_config_entry(
device_registry, entry.entry_id
):
device_registry.async_update_device(
device.id,
remove_config_entry_id=entry.entry_id,
remove_config_subentry_id=None,
)
hass.config_entries.async_update_entry(entry, minor_version=2)
LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)
return True
@@ -75,6 +75,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anthropic."""
VERSION = 2
MINOR_VERSION = 2
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -1,69 +1,17 @@
|
||||
"""Conversation support for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
import json
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
THINKING_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
from .const import CONF_PROMPT, DOMAIN
|
||||
from .entity import AnthropicBaseLLMEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -82,253 +30,10 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> ToolParam:
|
||||
"""Format tool specification."""
|
||||
return ToolParam(
|
||||
name=tool.name,
|
||||
description=tool.description or "",
|
||||
input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
messages: list[MessageParam] = []
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
tool_result_block,
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
|
||||
elif isinstance(content, conversation.UserContent):
|
||||
# Combine consequent user messages
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=content.content,
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
TextBlockParam(type="text", text=content.content),
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
elif isinstance(content, conversation.AssistantContent):
|
||||
# Combine consequent assistant messages
|
||||
if not messages or messages[-1]["role"] != "assistant":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="assistant",
|
||||
content=[],
|
||||
)
|
||||
)
|
||||
|
||||
if content.content:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise TypeError(f"Unexpected content type: {type(content)}")
|
||||
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
result: AsyncStream[MessageStreamEvent],
|
||||
messages: list[MessageParam],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
- RawMessageStartEvent with no content
|
||||
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- ...
|
||||
- RawContentBlockDeltaEvent with a SignatureDelta
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
|
||||
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
|
||||
- RawContentBlockStartEvent with an empty TextBlock
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawMessageDeltaEvent with a stop_reason='tool_use'
|
||||
- RawMessageStopEvent(type='message_stop')
|
||||
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if result is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_message: MessageParam | None = None
|
||||
current_block: (
|
||||
TextBlockParam
|
||||
| ToolUseBlockParam
|
||||
| ThinkingBlockParam
|
||||
| RedactedThinkingBlockParam
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
input_usage: Usage | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
input_usage = response.message.usage
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
current_block = TextBlockParam(
|
||||
type="text", text=response.content_block.text
|
||||
)
|
||||
yield {"role": "assistant"}
|
||||
if response.content_block.text:
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
current_block = ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=response.content_block.thinking,
|
||||
signature=response.content_block.signature,
|
||||
)
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
current_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking", data=response.content_block.data
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected delta without a block")
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
text_block = cast(TextBlockParam, current_block)
|
||||
text_block["text"] += response.delta.text
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["thinking"] += response.delta.thinking
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["signature"] += response.delta.signature
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
# tool block
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_block["input"] = tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_block["id"],
|
||||
tool_name=current_block["name"],
|
||||
tool_args=tool_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
# thinking block
|
||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
conversation.ConversationEntity,
|
||||
conversation.AbstractConversationAgent,
|
||||
AnthropicBaseLLMEntity,
|
||||
):
|
||||
"""Anthropic conversation agent."""
|
||||
|
||||
@@ -336,17 +41,7 @@ class AnthropicConversationEntity(
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
super().__init__(entry, subentry)
|
||||
if self.subentry.data.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
@@ -357,13 +52,6 @@ class AnthropicConversationEntity(
|
||||
"""Return a list of supported languages."""
|
||||
return MATCH_ALL
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
self.entry.async_on_unload(
|
||||
self.entry.add_update_listener(self._async_entry_update_listener)
|
||||
)
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -394,77 +82,3 @@ class AnthropicConversationEntity(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
model_args = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"tools": tools or NOT_GIVEN,
|
||||
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
"system": system.content,
|
||||
"stream": True,
|
||||
}
|
||||
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
) from err
|
||||
|
||||
messages.extend(
|
||||
_convert_content(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
# Reload as we update device info + entity name + supported features
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
homeassistant/components/anthropic/entity.py (new file, 393 lines)
@@ -0,0 +1,393 @@
|
||||
"""Base entity for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
import json
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
THINKING_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> ToolParam:
|
||||
"""Format tool specification."""
|
||||
return ToolParam(
|
||||
name=tool.name,
|
||||
description=tool.description or "",
|
||||
input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
messages: list[MessageParam] = []
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
tool_result_block,
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
|
||||
elif isinstance(content, conversation.UserContent):
|
||||
# Combine consequent user messages
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=content.content,
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
TextBlockParam(type="text", text=content.content),
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
elif isinstance(content, conversation.AssistantContent):
|
||||
# Combine consequent assistant messages
|
||||
if not messages or messages[-1]["role"] != "assistant":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="assistant",
|
||||
content=[],
|
||||
)
|
||||
)
|
||||
|
||||
if content.content:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise TypeError(f"Unexpected content type: {type(content)}")
|
||||
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
result: AsyncStream[MessageStreamEvent],
|
||||
messages: list[MessageParam],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
- RawMessageStartEvent with no content
|
||||
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- ...
|
||||
- RawContentBlockDeltaEvent with a SignatureDelta
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
|
||||
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
|
||||
- RawContentBlockStartEvent with an empty TextBlock
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawMessageDeltaEvent with a stop_reason='tool_use'
|
||||
- RawMessageStopEvent(type='message_stop')
|
||||
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if result is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_message: MessageParam | None = None
|
||||
current_block: (
|
||||
TextBlockParam
|
||||
| ToolUseBlockParam
|
||||
| ThinkingBlockParam
|
||||
| RedactedThinkingBlockParam
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
input_usage: Usage | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
input_usage = response.message.usage
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
current_block = TextBlockParam(
|
||||
type="text", text=response.content_block.text
|
||||
)
|
||||
yield {"role": "assistant"}
|
||||
if response.content_block.text:
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
current_block = ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=response.content_block.thinking,
|
||||
signature=response.content_block.signature,
|
||||
)
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
current_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking", data=response.content_block.data
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected delta without a block")
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
text_block = cast(TextBlockParam, current_block)
|
||||
text_block["text"] += response.delta.text
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["thinking"] += response.delta.thinking
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["signature"] += response.delta.signature
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
# tool block
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_block["input"] = tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_block["id"],
|
||||
tool_name=current_block["name"],
|
||||
tool_args=tool_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
# thinking block
|
||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicBaseLLMEntity(Entity):
|
||||
"""Anthropic base LLM entity."""
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the entity."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
model_args = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"tools": tools or NOT_GIVEN,
|
||||
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
"system": system.content,
|
||||
"stream": True,
|
||||
}
|
||||
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
) from err
|
||||
|
||||
messages.extend(
|
||||
_convert_content(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
@@ -71,9 +71,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
cv.make_entity_service_schema(
{
vol.Optional("message"): str,
vol.Optional("media_id"): str,
vol.Optional("media_id"): _media_id_validator,
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
vol.Optional("preannounce_media_id"): _media_id_validator,
}
),
cv.has_at_least_one_key("message", "media_id"),
@@ -81,15 +81,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"async_internal_announce",
[AssistSatelliteEntityFeature.ANNOUNCE],
)
component.async_register_entity_service(
"start_conversation",
vol.All(
cv.make_entity_service_schema(
{
vol.Optional("start_message"): str,
vol.Optional("start_media_id"): str,
vol.Optional("start_media_id"): _media_id_validator,
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
vol.Optional("preannounce_media_id"): _media_id_validator,
vol.Optional("extra_system_prompt"): str,
}
),
@@ -135,9 +136,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Required(ATTR_ENTITY_ID): cv.entity_domain(DOMAIN),
vol.Optional("question"): str,
vol.Optional("question_media_id"): str,
vol.Optional("question_media_id"): _media_id_validator,
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
vol.Optional("preannounce_media_id"): _media_id_validator,
vol.Optional("answers"): [
{
vol.Required("id"): str,
@@ -204,3 +205,20 @@ def has_one_non_empty_item(value: list[str]) -> list[str]:
raise vol.Invalid("sentences cannot be empty")
return value
# Validator for media_id fields that accepts both string and media selector format
_media_id_validator = vol.Any(
cv.string,  # Plain string format
vol.All(
vol.Schema(
{
vol.Required("media_content_id"): cv.string,
vol.Required("media_content_type"): cv.string,
vol.Remove("metadata"): dict,  # Ignore metadata if present
}
),
# Extract media_content_id from media selector format
lambda x: x["media_content_id"],
),
)
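The validator introduced above accepts either a plain media ID string or a media-selector style dict and normalizes both to the media content ID string. A small illustrative sketch; the example IDs and content type are made up:

plain = _media_id_validator("media-source://tts/cloud?message=hello")
selector = _media_id_validator(
    {
        "media_content_id": "media-source://tts/cloud?message=hello",
        "media_content_type": "provider",
        "metadata": {},  # removed by vol.Remove before the lambda runs
    }
)
assert plain == selector == "media-source://tts/cloud?message=hello"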
@@ -14,7 +14,9 @@ announce:
media_id:
required: false
selector:
text:
media:
accept:
- audio/*
preannounce:
required: false
default: true
@@ -23,7 +25,9 @@ announce:
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
start_conversation:
target:
entity:
@@ -40,7 +44,9 @@ start_conversation:
start_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
extra_system_prompt:
required: false
selector:
@@ -53,7 +59,9 @@ start_conversation:
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
ask_question:
fields:
entity_id:
@@ -72,7 +80,9 @@ ask_question:
question_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
preannounce:
required: false
default: true
@@ -81,7 +91,9 @@ ask_question:
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
answers:
required: false
selector:
@@ -2,9 +2,9 @@
|
||||
|
||||
from homeassistant.config_entries import SOURCE_SYSTEM
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, discovery_flow
|
||||
from homeassistant.helpers.backup import DATA_BACKUP
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -37,7 +37,6 @@ from .manager import (
|
||||
IdleEvent,
|
||||
IncorrectPasswordError,
|
||||
ManagerBackup,
|
||||
ManagerStateEvent,
|
||||
NewBackup,
|
||||
RestoreBackupEvent,
|
||||
RestoreBackupStage,
|
||||
@@ -45,6 +44,7 @@ from .manager import (
|
||||
WrittenBackup,
|
||||
)
|
||||
from .models import AddonInfo, AgentBackup, BackupNotFound, Folder
|
||||
from .services import async_setup_services
|
||||
from .util import suggested_filename, suggested_filename_from_name_date
|
||||
from .websocket import async_register_websocket_handlers
|
||||
|
||||
@@ -71,12 +71,12 @@ __all__ = [
|
||||
"IncorrectPasswordError",
|
||||
"LocalBackupAgent",
|
||||
"ManagerBackup",
|
||||
"ManagerStateEvent",
|
||||
"NewBackup",
|
||||
"RestoreBackupEvent",
|
||||
"RestoreBackupStage",
|
||||
"RestoreBackupState",
|
||||
"WrittenBackup",
|
||||
"async_get_manager",
|
||||
"suggested_filename",
|
||||
"suggested_filename_from_name_date",
|
||||
]
|
||||
@@ -103,39 +103,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
backup_manager = BackupManager(hass, reader_writer)
|
||||
hass.data[DATA_MANAGER] = backup_manager
|
||||
try:
|
||||
await backup_manager.async_setup()
|
||||
except Exception as err:
|
||||
hass.data[DATA_BACKUP].manager_ready.set_exception(err)
|
||||
raise
|
||||
else:
|
||||
hass.data[DATA_BACKUP].manager_ready.set_result(None)
|
||||
await backup_manager.async_setup()
|
||||
|
||||
async_register_websocket_handlers(hass, with_hassio)
|
||||
|
||||
async def async_handle_create_service(call: ServiceCall) -> None:
|
||||
"""Service handler for creating backups."""
|
||||
agent_id = list(backup_manager.local_backup_agents)[0]
|
||||
await backup_manager.async_create_backup(
|
||||
agent_ids=[agent_id],
|
||||
include_addons=None,
|
||||
include_all_addons=False,
|
||||
include_database=True,
|
||||
include_folders=None,
|
||||
include_homeassistant=True,
|
||||
name=None,
|
||||
password=None,
|
||||
)
|
||||
|
||||
async def async_handle_create_automatic_service(call: ServiceCall) -> None:
|
||||
"""Service handler for creating automatic backups."""
|
||||
await backup_manager.async_create_automatic_backup()
|
||||
|
||||
if not with_hassio:
|
||||
hass.services.async_register(DOMAIN, "create", async_handle_create_service)
|
||||
hass.services.async_register(
|
||||
DOMAIN, "create_automatic", async_handle_create_automatic_service
|
||||
)
|
||||
async_setup_services(hass)
|
||||
|
||||
async_register_http_views(hass)
|
||||
|
||||
@@ -164,3 +136,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bo
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_manager(hass: HomeAssistant) -> BackupManager:
|
||||
"""Get the backup manager instance.
|
||||
|
||||
Raises HomeAssistantError if the backup integration is not available.
|
||||
"""
|
||||
if DATA_MANAGER not in hass.data:
|
||||
raise HomeAssistantError("Backup integration is not available")
|
||||
|
||||
return hass.data[DATA_MANAGER]
|
||||
|
||||
@@ -1,38 +0,0 @@
"""Websocket commands for the Backup integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events

from .const import DATA_MANAGER
from .manager import ManagerStateEvent


@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
    """Register websocket commands."""
    websocket_api.async_register_command(hass, handle_subscribe_events)


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    if DATA_MANAGER in hass.data:
        manager = hass.data[DATA_MANAGER]
        on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
    connection.send_result(msg["id"])
@@ -8,10 +8,6 @@ from datetime import datetime

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
    async_subscribe_events,
    async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER
@@ -56,8 +52,8 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
    update_interval=None,
)
self.unsubscribe: list[Callable[[], None]] = [
    async_subscribe_events(hass, self._on_event),
    async_subscribe_platform_events(hass, self._on_event),
    backup_manager.async_subscribe_events(self._on_event),
    backup_manager.async_subscribe_platform_events(self._on_event),
]

self.backup_manager = backup_manager

@@ -36,7 +36,6 @@ from homeassistant.helpers import (
    issue_registry as ir,
    start,
)
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util

@@ -372,12 +371,10 @@ class BackupManager:
# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_action_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
    DATA_BACKUP
].backup_event_subscriptions
self._backup_platform_event_subscriptions = hass.data[
    DATA_BACKUP
].backup_platform_event_subscriptions
self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
self._backup_platform_event_subscriptions: list[
    Callable[[BackupPlatformEvent], None]
] = []

async def async_setup(self) -> None:
    """Set up the backup manager."""
@@ -1385,6 +1382,32 @@ class BackupManager:
for subscription in self._backup_event_subscriptions:
    subscription(event)

@callback
def async_subscribe_events(
    self,
    on_event: Callable[[ManagerStateEvent], None],
) -> Callable[[], None]:
    """Subscribe events."""

    def remove_subscription() -> None:
        self._backup_event_subscriptions.remove(on_event)

    self._backup_event_subscriptions.append(on_event)
    return remove_subscription

@callback
def async_subscribe_platform_events(
    self,
    on_event: Callable[[BackupPlatformEvent], None],
) -> Callable[[], None]:
    """Subscribe to backup platform events."""

    def remove_subscription() -> None:
        self._backup_platform_event_subscriptions.remove(on_event)

    self._backup_platform_event_subscriptions.append(on_event)
    return remove_subscription

def _create_automatic_backup_failed_issue(
    self, translation_key: str, translation_placeholders: dict[str, str] | None
) -> None:
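A rough sketch of how a consumer might use the manager-level subscription API added above (the watcher function is hypothetical; the returned callback removes the subscription again and should be called on teardown):

import logging
from collections.abc import Callable

from homeassistant.components.backup import async_get_manager
from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


def watch_backup_state(hass: HomeAssistant) -> Callable[[], None]:
    """Subscribe to backup manager state events and return the unsubscribe callback."""
    manager = async_get_manager(hass)

    def _on_event(event) -> None:
        # React to manager state changes (idle, create/restore in progress, blocked, ...).
        _LOGGER.debug("Backup manager state changed: %s", event)

    # async_subscribe_events returns a callable that removes the subscription.
    return manager.async_subscribe_events(_on_event)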
@@ -19,9 +19,14 @@ from homeassistant.components.onboarding import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager

from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
from . import (
    BackupManager,
    Folder,
    IncorrectPasswordError,
    async_get_manager,
    http as backup_http,
)

if TYPE_CHECKING:
    from homeassistant.components.onboarding import OnboardingStoreData
@@ -54,7 +59,7 @@ def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
if self._data["done"]:
    raise HTTPUnauthorized

manager = await async_get_backup_manager(request.app[KEY_HASS])
manager = async_get_manager(request.app[KEY_HASS])
return await func(self, manager, request, *args, **kwargs)

return with_backup

homeassistant/components/backup/services.py
@@ -0,0 +1,36 @@
"""The Backup integration."""

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers.hassio import is_hassio

from .const import DATA_MANAGER, DOMAIN


async def _async_handle_create_service(call: ServiceCall) -> None:
    """Service handler for creating backups."""
    backup_manager = call.hass.data[DATA_MANAGER]
    agent_id = list(backup_manager.local_backup_agents)[0]
    await backup_manager.async_create_backup(
        agent_ids=[agent_id],
        include_addons=None,
        include_all_addons=False,
        include_database=True,
        include_folders=None,
        include_homeassistant=True,
        name=None,
        password=None,
    )


async def _async_handle_create_automatic_service(call: ServiceCall) -> None:
    """Service handler for creating automatic backups."""
    await call.hass.data[DATA_MANAGER].async_create_automatic_backup()


def async_setup_services(hass: HomeAssistant) -> None:
    """Register services."""
    if not is_hassio(hass):
        hass.services.async_register(DOMAIN, "create", _async_handle_create_service)
        hass.services.async_register(
            DOMAIN, "create_automatic", _async_handle_create_automatic_service
        )
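A small sketch of how the service registered by this new module can be invoked afterwards (the calling coroutine is made up; "backup" is the integration's DOMAIN):

from homeassistant.core import HomeAssistant


async def _start_manual_backup(hass: HomeAssistant) -> None:
    """Call the backup.create service, assuming the Backup integration is set up."""
    await hass.services.async_call("backup", "create", blocking=True)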
@@ -10,7 +10,11 @@ from homeassistant.helpers import config_validation as cv

from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
from .manager import (
    DecryptOnDowloadNotSupported,
    IncorrectPasswordError,
    ManagerStateEvent,
)
from .models import BackupNotFound, Folder


@@ -30,6 +34,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
websocket_api.async_register_command(hass, handle_delete)
websocket_api.async_register_command(hass, handle_restore)
websocket_api.async_register_command(hass, handle_subscribe_events)

websocket_api.async_register_command(hass, handle_config_info)
websocket_api.async_register_command(hass, handle_config_update)
@@ -417,3 +422,22 @@ def handle_config_update(
changes.pop("type")
manager.config.update(**changes)
connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    manager = hass.data[DATA_MANAGER]
    on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
    connection.send_result(msg["id"])
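For reference, a sketch of how the backup/subscribe_events command could be exercised from a websocket client, patterned on Home Assistant's test helpers (the fixture names and message ordering are assumptions, not part of the diff):

async def test_subscribe_backup_events(hass, hass_ws_client) -> None:
    """Subscribe to backup events and read the replayed initial state."""
    client = await hass_ws_client(hass)
    await client.send_json_auto_id({"type": "backup/subscribe_events"})

    # The handler first replays manager.last_event to the new subscriber...
    event_message = await client.receive_json()
    assert event_message["type"] == "event"

    # ...and then acknowledges the subscription itself.
    result = await client.receive_json()
    assert result["success"]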
@@ -14,7 +14,7 @@ from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.typing import ConfigType

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
from .services import setup_services
from .services import async_setup_services
from .types import BoschAlarmConfigEntry

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -29,7 +29,7 @@ PLATFORMS: list[Platform] = [

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up bosch alarm services."""
    setup_services(hass)
    async_setup_services(hass)
    return True


@@ -9,7 +9,7 @@ from typing import Any
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import dt as dt_util
@@ -66,7 +66,8 @@ async def async_set_panel_date(call: ServiceCall) -> None:
    ) from err


def setup_services(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the bosch alarm integration."""

    hass.services.async_register(

@@ -168,7 +168,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
key="windazimuth",
translation_key="windazimuth",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),

@@ -5,8 +5,9 @@ from pycoolmasternet_async import CoolMasterNet
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import CONF_SWING_SUPPORT
from .const import CONF_SWING_SUPPORT, DOMAIN
from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator

PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR]
@@ -48,3 +49,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
async def async_unload_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -> bool:
    """Unload a Coolmaster config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_remove_config_entry_device(
    hass: HomeAssistant,
    config_entry: CoolmasterConfigEntry,
    device_entry: dr.DeviceEntry,
) -> bool:
    """Remove a config entry from a device."""
    return not device_entry.identifiers.intersection(
        (DOMAIN, unit_id) for unit_id in config_entry.runtime_data.data
    )
@@ -9,7 +9,7 @@ from devolo_home_control_api.devices.zwave import Zwave
from devolo_home_control_api.homecontrol import HomeControl

from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity import Entity

from .const import DOMAIN
@@ -35,7 +35,7 @@ class DevoloDeviceEntity(Entity):
) # This is not doing I/O. It fetches an internal state of the API
self._attr_should_poll = False
self._attr_unique_id = element_uid
self._attr_device_info = DeviceInfo(
self._attr_device_info = dr.DeviceInfo(
    configuration_url=f"https://{urlparse(device_instance.href).netloc}",
    identifiers={(DOMAIN, self._device_instance.uid)},
    manufacturer=device_instance.brand,
@@ -88,6 +88,16 @@ class DevoloDeviceEntity(Entity):
elif len(message) == 3 and message[2] == "status":
    # Maybe the API wants to tell us, that the device went on- or offline.
    self._attr_available = self._device_instance.is_online()
elif message[1] == "del" and self.platform.config_entry:
    device_registry = dr.async_get(self.hass)
    device = device_registry.async_get_device(
        identifiers={(DOMAIN, self._device_instance.uid)}
    )
    if device:
        device_registry.async_update_device(
            device.id,
            remove_config_entry_id=self.platform.config_entry.entry_id,
        )
else:
    _LOGGER.debug("No valid message received: %s", message)
@@ -12,5 +12,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyW215"],
"requirements": ["pyW215==0.7.0"]
"requirements": ["pyW215==0.8.0"]
}

@@ -92,7 +92,7 @@ SENSORS: list[DROPSensorEntityDescription] = [
native_unit_of_measurement=UnitOfVolume.GALLONS,
suggested_display_precision=1,
value_fn=lambda device: device.drop_api.water_used_today(),
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
),
DROPSensorEntityDescription(
    key=AVERAGE_WATER_USED,

@@ -241,6 +241,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="SHORT_POWER_FAILURE_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -249,6 +250,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="LONG_POWER_FAILURE_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -257,6 +259,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SAG_L1_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -265,6 +268,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SAG_L2_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -273,6 +277,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SAG_L3_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -281,6 +286,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SWELL_L1_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -289,6 +295,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SWELL_L2_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -297,6 +304,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = (
obis_reference="VOLTAGE_SWELL_L3_COUNT",
dsmr_versions={"2.2", "4", "5", "5L"},
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
DSMRSensorEntityDescription(
@@ -1,79 +0,0 @@
|
||||
"""Support for sending data to Dweet.io."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import dweepy
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_FRIENDLY_NAME,
|
||||
CONF_NAME,
|
||||
CONF_WHITELIST,
|
||||
EVENT_STATE_CHANGED,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, state as state_helper
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "dweet"
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_WHITELIST, default=[]): vol.All(
|
||||
cv.ensure_list, [cv.entity_id]
|
||||
),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Dweet.io component."""
|
||||
conf = config[DOMAIN]
|
||||
name = conf.get(CONF_NAME)
|
||||
whitelist = conf.get(CONF_WHITELIST)
|
||||
json_body = {}
|
||||
|
||||
def dweet_event_listener(event):
|
||||
"""Listen for new messages on the bus and sends them to Dweet.io."""
|
||||
state = event.data.get("new_state")
|
||||
if (
|
||||
state is None
|
||||
or state.state in (STATE_UNKNOWN, "")
|
||||
or state.entity_id not in whitelist
|
||||
):
|
||||
return
|
||||
|
||||
try:
|
||||
_state = state_helper.state_as_number(state)
|
||||
except ValueError:
|
||||
_state = state.state
|
||||
|
||||
json_body[state.attributes.get(ATTR_FRIENDLY_NAME)] = _state
|
||||
|
||||
send_data(name, json_body)
|
||||
|
||||
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def send_data(name, msg):
|
||||
"""Send the collected data to Dweet.io."""
|
||||
try:
|
||||
dweepy.dweet_for(name, msg)
|
||||
except dweepy.DweepyError:
|
||||
_LOGGER.error("Error saving data to Dweet.io: %s", msg)
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"domain": "dweet",
|
||||
"name": "dweet.io",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/dweet",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["dweepy"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["dweepy==0.3.0"]
|
||||
}
|
||||
@@ -1,124 +0,0 @@
|
||||
"""Support for showing values from Dweet.io."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import json
|
||||
import logging
|
||||
|
||||
import dweepy
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE,
|
||||
CONF_NAME,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "Dweet.io Sensor"
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_DEVICE): cv.string,
|
||||
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Dweet sensor."""
|
||||
name = config.get(CONF_NAME)
|
||||
device = config.get(CONF_DEVICE)
|
||||
value_template = config.get(CONF_VALUE_TEMPLATE)
|
||||
unit = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
|
||||
try:
|
||||
content = json.dumps(dweepy.get_latest_dweet_for(device)[0]["content"])
|
||||
except dweepy.DweepyError:
|
||||
_LOGGER.error("Device/thing %s could not be found", device)
|
||||
return
|
||||
|
||||
if value_template and value_template.render_with_possible_json_value(content) == "":
|
||||
_LOGGER.error("%s was not found", value_template)
|
||||
return
|
||||
|
||||
dweet = DweetData(device)
|
||||
|
||||
add_entities([DweetSensor(hass, dweet, name, value_template, unit)], True)
|
||||
|
||||
|
||||
class DweetSensor(SensorEntity):
|
||||
"""Representation of a Dweet sensor."""
|
||||
|
||||
def __init__(self, hass, dweet, name, value_template, unit_of_measurement):
|
||||
"""Initialize the sensor."""
|
||||
self.hass = hass
|
||||
self.dweet = dweet
|
||||
self._name = name
|
||||
self._value_template = value_template
|
||||
self._state = None
|
||||
self._unit_of_measurement = unit_of_measurement
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self):
|
||||
"""Return the unit the value is expressed in."""
|
||||
return self._unit_of_measurement
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state."""
|
||||
return self._state
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from REST API."""
|
||||
self.dweet.update()
|
||||
|
||||
if self.dweet.data is None:
|
||||
self._state = None
|
||||
else:
|
||||
values = json.dumps(self.dweet.data[0]["content"])
|
||||
self._state = self._value_template.render_with_possible_json_value(
|
||||
values, None
|
||||
)
|
||||
|
||||
|
||||
class DweetData:
|
||||
"""The class for handling the data retrieval."""
|
||||
|
||||
def __init__(self, device):
|
||||
"""Initialize the sensor."""
|
||||
self._device = device
|
||||
self.data = None
|
||||
|
||||
def update(self):
|
||||
"""Get the latest data from Dweet.io."""
|
||||
try:
|
||||
self.data = dweepy.get_latest_dweet_for(self._device)
|
||||
except dweepy.DweepyError:
|
||||
_LOGGER.warning("Device %s doesn't contain any data", self._device)
|
||||
self.data = None
|
||||
@@ -12,7 +12,7 @@ from .bridge import DynaliteBridge
|
||||
from .const import DOMAIN, LOGGER, PLATFORMS
|
||||
from .convert_config import convert_config
|
||||
from .panel import async_register_dynalite_frontend
|
||||
from .services import setup_services
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
@@ -21,7 +21,7 @@ type DynaliteConfigEntry = ConfigEntry[DynaliteBridge]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Dynalite platform."""
|
||||
setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
await async_register_dynalite_frontend(hass)
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ async def _request_channel_level(service_call: ServiceCall) -> None:
|
||||
|
||||
|
||||
@callback
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Dynalite platform."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
|
||||
@@ -16,7 +16,12 @@ from homeassistant.config_entries import (
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import selector
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
selector,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_MESSAGE,
|
||||
@@ -26,6 +31,9 @@ from .const import (
|
||||
FEED_ID,
|
||||
FEED_NAME,
|
||||
FEED_TAG,
|
||||
SYNC_MODE,
|
||||
SYNC_MODE_AUTO,
|
||||
SYNC_MODE_MANUAL,
|
||||
)
|
||||
|
||||
|
||||
@@ -102,6 +110,17 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"mode": "dropdown",
|
||||
"multiple": True,
|
||||
}
|
||||
if user_input.get(SYNC_MODE) == SYNC_MODE_AUTO:
|
||||
return self.async_create_entry(
|
||||
title=sensor_name(self.url),
|
||||
data={
|
||||
CONF_URL: self.url,
|
||||
CONF_API_KEY: self.api_key,
|
||||
CONF_ONLY_INCLUDE_FEEDID: [
|
||||
feed[FEED_ID] for feed in result[CONF_MESSAGE]
|
||||
],
|
||||
},
|
||||
)
|
||||
return await self.async_step_choose_feeds()
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
@@ -110,6 +129,15 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_URL): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
vol.Required(
|
||||
SYNC_MODE, default=SYNC_MODE_MANUAL
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[SYNC_MODE_MANUAL, SYNC_MODE_AUTO],
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key=SYNC_MODE,
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
user_input,
|
||||
|
||||
@@ -14,6 +14,9 @@ EMONCMS_UUID_DOC_URL = (
|
||||
FEED_ID = "id"
|
||||
FEED_NAME = "name"
|
||||
FEED_TAG = "tag"
|
||||
SYNC_MODE = "sync_mode"
|
||||
SYNC_MODE_AUTO = "auto"
|
||||
SYNC_MODE_MANUAL = "manual"
|
||||
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"sync_mode": "Synchronization mode"
|
||||
},
|
||||
"data_description": {
|
||||
"url": "Server URL starting with the protocol (http or https)",
|
||||
@@ -24,6 +25,14 @@
|
||||
"already_configured": "This server is already configured"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"sync_mode": {
|
||||
"options": {
|
||||
"auto": "Synchronize all available Feeds",
|
||||
"manual": "Select which Feeds to synchronize"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"energy": {
|
||||
|
||||
@@ -363,7 +363,7 @@
|
||||
"discharging": "[%key:common::state::discharging%]",
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"full": "Full"
|
||||
"full": "[%key:common::state::full%]"
|
||||
}
|
||||
},
|
||||
"acb_available_energy": {
|
||||
|
||||
@@ -81,6 +81,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
|
||||
# if the string is empty
|
||||
if unit_of_measurement := static_info.unit_of_measurement:
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
self._attr_suggested_display_precision = static_info.accuracy_decimals
|
||||
self._attr_device_class = try_parse_enum(
|
||||
SensorDeviceClass, static_info.device_class
|
||||
)
|
||||
@@ -97,7 +98,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
|
||||
self._attr_state_class = _STATE_CLASSES.from_esphome(state_class)
|
||||
|
||||
@property
|
||||
def native_value(self) -> datetime | str | None:
|
||||
def native_value(self) -> datetime | int | float | None:
|
||||
"""Return the state of the entity."""
|
||||
if not self._has_state or (state := self._state).missing_state:
|
||||
return None
|
||||
@@ -106,7 +107,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
|
||||
return None
|
||||
if self.device_class is SensorDeviceClass.TIMESTAMP:
|
||||
return dt_util.utc_from_timestamp(state_float)
|
||||
return f"{state_float:.{self._static_info.accuracy_decimals}f}"
|
||||
return state_float
|
||||
|
||||
|
||||
class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEntity):
|
||||
|
||||
@@ -19,7 +19,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="chlorine",
|
||||
translation_key="chlorine",
|
||||
native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
|
||||
native_unit_of_measurement="mg/L",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250625.0"]
|
||||
"requirements": ["home-assistant-frontend==20250627.0"]
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
from types import MappingProxyType
|
||||
|
||||
from google.genai import Client
|
||||
from google.genai.errors import APIError, ClientError
|
||||
@@ -36,10 +37,13 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_PROMPT,
|
||||
DEFAULT_TITLE,
|
||||
DEFAULT_TTS_NAME,
|
||||
DOMAIN,
|
||||
FILE_POLLING_INTERVAL_SECONDS,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_TTS_OPTIONS,
|
||||
TIMEOUT_MILLIS,
|
||||
)
|
||||
|
||||
@@ -203,6 +207,8 @@ async def async_setup_entry(
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -216,6 +222,13 @@ async def async_unload_entry(
|
||||
return True
|
||||
|
||||
|
||||
async def async_update_options(
|
||||
hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
|
||||
) -> None:
|
||||
"""Update options."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
"""Migrate integration entry structure."""
|
||||
|
||||
@@ -242,6 +255,16 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
|
||||
|
||||
hass.config_entries.async_add_subentry(parent_entry, subentry)
|
||||
if use_existing:
|
||||
hass.config_entries.async_add_subentry(
|
||||
parent_entry,
|
||||
ConfigSubentry(
|
||||
data=MappingProxyType(RECOMMENDED_TTS_OPTIONS),
|
||||
subentry_type="tts",
|
||||
title=DEFAULT_TTS_NAME,
|
||||
unique_id=None,
|
||||
),
|
||||
)
|
||||
conversation_entity = entity_registry.async_get_entity_id(
|
||||
"conversation",
|
||||
DOMAIN,
|
||||
@@ -270,12 +293,65 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
)
|
||||
else:
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
remove_config_subentry_id=None,
|
||||
)
|
||||
|
||||
if not use_existing:
|
||||
await hass.config_entries.async_remove(entry.entry_id)
|
||||
else:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
title=DEFAULT_TITLE,
|
||||
options={},
|
||||
version=2,
|
||||
minor_version=2,
|
||||
)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate entry."""
|
||||
LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
|
||||
|
||||
if entry.version > 2:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 2 and entry.minor_version == 1:
|
||||
# Add TTS subentry which was missing in 2025.7.0b0
|
||||
if not any(
|
||||
subentry.subentry_type == "tts" for subentry in entry.subentries.values()
|
||||
):
|
||||
hass.config_entries.async_add_subentry(
|
||||
entry,
|
||||
ConfigSubentry(
|
||||
data=MappingProxyType(RECOMMENDED_TTS_OPTIONS),
|
||||
subentry_type="tts",
|
||||
title=DEFAULT_TTS_NAME,
|
||||
unique_id=None,
|
||||
),
|
||||
)
|
||||
|
||||
# Correct broken device migration in Home Assistant Core 2025.7.0b0-2025.7.0b1
|
||||
device_registry = dr.async_get(hass)
|
||||
for device in dr.async_entries_for_config_entry(
|
||||
device_registry, entry.entry_id
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
remove_config_subentry_id=None,
|
||||
)
|
||||
|
||||
hass.config_entries.async_update_entry(entry, minor_version=2)
|
||||
|
||||
LOGGER.debug(
|
||||
"Migration to version %s:%s successful", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -47,13 +47,18 @@ from .const import (
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DEFAULT_TITLE,
|
||||
DEFAULT_TTS_NAME,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_CONVERSATION_OPTIONS,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_TTS_MODEL,
|
||||
RECOMMENDED_TTS_OPTIONS,
|
||||
RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
|
||||
TIMEOUT_MILLIS,
|
||||
)
|
||||
@@ -66,12 +71,6 @@ STEP_API_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
RECOMMENDED_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
|
||||
async def validate_input(data: dict[str, Any]) -> None:
|
||||
"""Validate the user input allows us to connect.
|
||||
@@ -93,6 +92,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Google Generative AI Conversation."""
|
||||
|
||||
VERSION = 2
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_api(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -118,15 +118,21 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data=user_input,
|
||||
)
|
||||
return self.async_create_entry(
|
||||
title="Google Generative AI",
|
||||
title=DEFAULT_TITLE,
|
||||
data=user_input,
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_OPTIONS,
|
||||
"data": RECOMMENDED_CONVERSATION_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
}
|
||||
},
|
||||
{
|
||||
"subentry_type": "tts",
|
||||
"data": RECOMMENDED_TTS_OPTIONS,
|
||||
"title": DEFAULT_TTS_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
],
|
||||
)
|
||||
return self.async_show_form(
|
||||
@@ -172,10 +178,13 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"conversation": ConversationSubentryFlowHandler}
|
||||
return {
|
||||
"conversation": LLMSubentryFlowHandler,
|
||||
"tts": LLMSubentryFlowHandler,
|
||||
}
|
||||
|
||||
|
||||
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
class LLMSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Flow for managing conversation subentries."""
|
||||
|
||||
last_rendered_recommended = False
|
||||
@@ -202,7 +211,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
if user_input is None:
|
||||
if self._is_new:
|
||||
options = RECOMMENDED_OPTIONS.copy()
|
||||
options: dict[str, Any]
|
||||
if self._subentry_type == "tts":
|
||||
options = RECOMMENDED_TTS_OPTIONS.copy()
|
||||
else:
|
||||
options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
|
||||
else:
|
||||
# If this is a reconfiguration, we need to copy the existing options
|
||||
# so that we can show the current values in the form.
|
||||
@@ -216,7 +229,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if not user_input.get(CONF_LLM_HASS_API):
|
||||
user_input.pop(CONF_LLM_HASS_API, None)
|
||||
# Don't allow to save options that enable the Google Seearch tool with an Assist API
|
||||
# Don't allow to save options that enable the Google Search tool with an Assist API
|
||||
if not (
|
||||
user_input.get(CONF_LLM_HASS_API)
|
||||
and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
|
||||
@@ -240,7 +253,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
options = user_input
|
||||
|
||||
schema = await google_generative_ai_config_option_schema(
|
||||
self.hass, self._is_new, options, self._genai_client
|
||||
self.hass, self._is_new, self._subentry_type, options, self._genai_client
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="set_options", data_schema=vol.Schema(schema), errors=errors
|
||||
@@ -253,6 +266,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
async def google_generative_ai_config_option_schema(
|
||||
hass: HomeAssistant,
|
||||
is_new: bool,
|
||||
subentry_type: str,
|
||||
options: Mapping[str, Any],
|
||||
genai_client: genai.Client,
|
||||
) -> dict:
|
||||
@@ -270,26 +284,39 @@ async def google_generative_ai_config_option_schema(
|
||||
suggested_llm_apis = [suggested_llm_apis]
|
||||
|
||||
if is_new:
|
||||
if CONF_NAME in options:
|
||||
default_name = options[CONF_NAME]
|
||||
elif subentry_type == "tts":
|
||||
default_name = DEFAULT_TTS_NAME
|
||||
else:
|
||||
default_name = DEFAULT_CONVERSATION_NAME
|
||||
schema: dict[vol.Required | vol.Optional, Any] = {
|
||||
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
|
||||
vol.Required(CONF_NAME, default=default_name): str,
|
||||
}
|
||||
else:
|
||||
schema = {}
|
||||
|
||||
if subentry_type == "conversation":
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PROMPT,
|
||||
description={
|
||||
"suggested_value": options.get(
|
||||
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
)
|
||||
},
|
||||
): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
description={"suggested_value": suggested_llm_apis},
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(options=hass_apis, multiple=True)
|
||||
),
|
||||
}
|
||||
)
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PROMPT,
|
||||
description={
|
||||
"suggested_value": options.get(
|
||||
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
)
|
||||
},
|
||||
): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
description={"suggested_value": suggested_llm_apis},
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
@@ -310,7 +337,7 @@ async def google_generative_ai_config_option_schema(
|
||||
if (
|
||||
api_model.display_name
|
||||
and api_model.name
|
||||
and "tts" not in api_model.name
|
||||
and ("tts" in api_model.name) == (subentry_type == "tts")
|
||||
and "vision" not in api_model.name
|
||||
and api_model.supported_actions
|
||||
and "generateContent" in api_model.supported_actions
|
||||
@@ -341,12 +368,17 @@ async def google_generative_ai_config_option_schema(
|
||||
)
|
||||
)
|
||||
|
||||
if subentry_type == "tts":
|
||||
default_model = RECOMMENDED_TTS_MODEL
|
||||
else:
|
||||
default_model = RECOMMENDED_CHAT_MODEL
|
||||
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CHAT_MODEL,
|
||||
description={"suggested_value": options.get(CONF_CHAT_MODEL)},
|
||||
default=RECOMMENDED_CHAT_MODEL,
|
||||
default=default_model,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
|
||||
),
|
||||
@@ -396,13 +428,18 @@ async def google_generative_ai_config_option_schema(
|
||||
},
|
||||
default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
): harm_block_thresholds_selector,
|
||||
vol.Optional(
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
description={
|
||||
"suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
|
||||
},
|
||||
default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
if subentry_type == "conversation":
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
description={
|
||||
"suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
|
||||
},
|
||||
default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
return schema
|
||||
|
||||
@@ -2,17 +2,21 @@
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import CONF_LLM_HASS_API
|
||||
from homeassistant.helpers import llm
|
||||
|
||||
DOMAIN = "google_generative_ai_conversation"
|
||||
DEFAULT_TITLE = "Google Generative AI"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
CONF_PROMPT = "prompt"
|
||||
|
||||
DEFAULT_CONVERSATION_NAME = "Google AI Conversation"
|
||||
DEFAULT_TTS_NAME = "Google AI TTS"
|
||||
|
||||
ATTR_MODEL = "model"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
|
||||
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
|
||||
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_TOP_P = "top_p"
|
||||
@@ -31,3 +35,12 @@ RECOMMENDED_USE_GOOGLE_SEARCH_TOOL = False
|
||||
|
||||
TIMEOUT_MILLIS = 10000
|
||||
FILE_POLLING_INTERVAL_SECONDS = 0.05
|
||||
RECOMMENDED_CONVERSATION_OPTIONS = {
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_RECOMMENDED: True,
|
||||
}
|
||||
|
||||
RECOMMENDED_TTS_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
}
|
||||
|
||||
@@ -61,9 +61,6 @@ class GoogleGenerativeAIConversationEntity(
|
||||
self.hass, "conversation", self.entry.entry_id, self.entity_id
|
||||
)
|
||||
conversation.async_set_agent(self.hass, self.entry, self)
|
||||
self.entry.async_on_unload(
|
||||
self.entry.add_update_listener(self._async_entry_update_listener)
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""When entity will be removed from Home Assistant."""
|
||||
@@ -103,10 +100,3 @@ class GoogleGenerativeAIConversationEntity(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
# Reload as we update device info + entity name + supported features
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -21,6 +21,7 @@ async def async_get_config_entry_diagnostics(
|
||||
"title": entry.title,
|
||||
"data": entry.data,
|
||||
"options": entry.options,
|
||||
"subentries": dict(entry.subentries),
|
||||
},
|
||||
TO_REDACT,
|
||||
)
|
||||
|
||||
@@ -301,7 +301,12 @@ async def _transform_stream(
|
||||
class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Google Generative AI base entity."""
|
||||
|
||||
def __init__(self, entry: ConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
entry: ConfigEntry,
|
||||
subentry: ConfigSubentry,
|
||||
default_model: str = RECOMMENDED_CHAT_MODEL,
|
||||
) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
@@ -312,7 +317,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
model=subentry.data.get(CONF_CHAT_MODEL, default_model).split("/")[-1],
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
|
||||
@@ -0,0 +1,73 @@
"""Helper classes for Google Generative AI integration."""

from __future__ import annotations

from contextlib import suppress
import io
import wave

from homeassistant.exceptions import HomeAssistantError

from .const import LOGGER


def convert_to_wav(audio_data: bytes, mime_type: str) -> bytes:
    """Generate a WAV file header for the given audio data and parameters.

    Args:
        audio_data: The raw audio data as a bytes object.
        mime_type: Mime type of the audio data.

    Returns:
        A bytes object representing the WAV file header.

    """
    parameters = _parse_audio_mime_type(mime_type)

    wav_buffer = io.BytesIO()
    with wave.open(wav_buffer, "wb") as wf:
        wf.setnchannels(1)
        wf.setsampwidth(parameters["bits_per_sample"] // 8)
        wf.setframerate(parameters["rate"])
        wf.writeframes(audio_data)

    return wav_buffer.getvalue()


# Below code is from https://aistudio.google.com/app/generate-speech
# when you select "Get SDK code to generate speech".
def _parse_audio_mime_type(mime_type: str) -> dict[str, int]:
    """Parse bits per sample and rate from an audio MIME type string.

    Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".

    Args:
        mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").

    Returns:
        A dictionary with "bits_per_sample" and "rate" keys. Values will be
        integers if found, otherwise None.

    """
    if not mime_type.startswith("audio/L"):
        LOGGER.warning("Received unexpected MIME type %s", mime_type)
        raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")

    bits_per_sample = 16
    rate = 24000

    # Extract rate from parameters
    parts = mime_type.split(";")
    for param in parts:  # Skip the main type part
        param = param.strip()
        if param.lower().startswith("rate="):
            # Handle cases like "rate=" with no value or non-integer value and keep rate as default
            with suppress(ValueError, IndexError):
                rate_str = param.split("=", 1)[1]
                rate = int(rate_str)
        elif param.startswith("audio/L"):
            # Keep bits_per_sample as default if conversion fails
            with suppress(ValueError, IndexError):
                bits_per_sample = int(param.split("L", 1)[1])

    return {"bits_per_sample": bits_per_sample, "rate": rate}
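A quick usage sketch for the helper above (not part of the diff; the one-second silence buffer is made up): wrap raw 16-bit, 24 kHz PCM as returned by the Gemini TTS API in a playable WAV container.

from homeassistant.components.google_generative_ai_conversation.helpers import (
    convert_to_wav,
)

raw_pcm = b"\x00\x00" * 24000  # one second of 16-bit mono silence at 24 kHz
wav_bytes = convert_to_wav(raw_pcm, "audio/L16;rate=24000")
assert wav_bytes.startswith(b"RIFF")  # WAV output starts with the RIFF chunk header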
@@ -29,7 +29,6 @@
|
||||
"reconfigure": "Reconfigure conversation agent"
|
||||
},
|
||||
"entry_type": "Conversation agent",
|
||||
|
||||
"step": {
|
||||
"set_options": {
|
||||
"data": {
|
||||
@@ -61,6 +60,34 @@
|
||||
"error": {
|
||||
"invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
|
||||
}
|
||||
},
|
||||
"tts": {
|
||||
"initiate_flow": {
|
||||
"user": "Add Text-to-Speech service",
|
||||
"reconfigure": "Reconfigure Text-to-Speech service"
|
||||
},
|
||||
"entry_type": "Text-to-Speech",
|
||||
"step": {
|
||||
"set_options": {
|
||||
"data": {
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"recommended": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::recommended%]",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"temperature": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::temperature%]",
|
||||
"top_p": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_p%]",
|
||||
"top_k": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_k%]",
|
||||
"max_tokens": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::max_tokens%]",
|
||||
"harassment_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::harassment_block_threshold%]",
|
||||
"hate_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::hate_block_threshold%]",
|
||||
"sexual_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::sexual_block_threshold%]",
|
||||
"dangerous_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::dangerous_block_threshold%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"entry_not_loaded": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -2,13 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
import io
|
||||
import logging
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
import wave
|
||||
|
||||
from google.genai import types
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from propcache.api import cached_property
|
||||
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_VOICE,
|
||||
@@ -16,15 +15,14 @@ from homeassistant.components.tts import (
|
||||
TtsAudioType,
|
||||
Voice,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_MODEL, DOMAIN, RECOMMENDED_TTS_MODEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .const import CONF_CHAT_MODEL, LOGGER, RECOMMENDED_TTS_MODEL
|
||||
from .entity import GoogleGenerativeAILLMBaseEntity
|
||||
from .helpers import convert_to_wav
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -32,15 +30,23 @@ async def async_setup_entry(
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up TTS entity."""
|
||||
tts_entity = GoogleGenerativeAITextToSpeechEntity(config_entry)
|
||||
async_add_entities([tts_entity])
|
||||
"""Set up TTS entities."""
|
||||
for subentry in config_entry.subentries.values():
|
||||
if subentry.subentry_type != "tts":
|
||||
continue
|
||||
|
||||
async_add_entities(
|
||||
[GoogleGenerativeAITextToSpeechEntity(config_entry, subentry)],
|
||||
config_subentry_id=subentry.subentry_id,
|
||||
)
|
||||
|
||||
|
||||
class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
|
||||
class GoogleGenerativeAITextToSpeechEntity(
|
||||
TextToSpeechEntity, GoogleGenerativeAILLMBaseEntity
|
||||
):
|
||||
"""Google Generative AI text-to-speech entity."""
|
||||
|
||||
_attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
|
||||
_attr_supported_options = [ATTR_VOICE]
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#languages
|
||||
_attr_supported_languages = [
|
||||
"ar-EG",
|
||||
@@ -68,6 +74,8 @@ class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
|
||||
"uk-UA",
|
||||
"vi-VN",
|
||||
]
|
||||
# Unused, but required by base class.
|
||||
# The Gemini TTS models detect the input language automatically.
|
||||
_attr_default_language = "en-US"
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#voices
|
||||
_supported_voices = [
|
||||
@@ -106,110 +114,44 @@ class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
|
||||
)
|
||||
]
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize Google Generative AI Conversation speech entity."""
|
||||
self.entry = entry
|
||||
self._attr_name = "Google Generative AI TTS"
|
||||
self._attr_unique_id = f"{entry.entry_id}_tts"
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._genai_client = entry.runtime_data
|
||||
self._default_voice_id = self._supported_voices[0].voice_id
|
||||
def __init__(self, config_entry: ConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the TTS entity."""
|
||||
super().__init__(config_entry, subentry, RECOMMENDED_TTS_MODEL)
|
||||
|
||||
@callback
|
||||
def async_get_supported_voices(self, language: str) -> list[Voice] | None:
|
||||
def async_get_supported_voices(self, language: str) -> list[Voice]:
|
||||
"""Return a list of supported voices for a language."""
|
||||
return self._supported_voices
|
||||
|
||||
@cached_property
|
||||
def default_options(self) -> Mapping[str, Any]:
|
||||
"""Return a mapping with the default options."""
|
||||
return {
|
||||
ATTR_VOICE: self._supported_voices[0].voice_id,
|
||||
}
|
||||
|
||||
async def async_get_tts_audio(
|
||||
self, message: str, language: str, options: dict[str, Any]
|
||||
) -> TtsAudioType:
|
||||
"""Load tts audio file from the engine."""
|
||||
try:
|
||||
response = self._genai_client.models.generate_content(
|
||||
model=options.get(ATTR_MODEL, RECOMMENDED_TTS_MODEL),
|
||||
contents=message,
|
||||
config=types.GenerateContentConfig(
|
||||
response_modalities=["AUDIO"],
|
||||
speech_config=types.SpeechConfig(
|
||||
voice_config=types.VoiceConfig(
|
||||
prebuilt_voice_config=types.PrebuiltVoiceConfig(
|
||||
voice_name=options.get(
|
||||
ATTR_VOICE, self._default_voice_id
|
||||
)
|
||||
)
|
||||
)
|
||||
),
|
||||
),
|
||||
config = self.create_generate_content_config()
|
||||
config.response_modalities = ["AUDIO"]
|
||||
config.speech_config = types.SpeechConfig(
|
||||
voice_config=types.VoiceConfig(
|
||||
prebuilt_voice_config=types.PrebuiltVoiceConfig(
|
||||
voice_name=options[ATTR_VOICE]
|
||||
)
|
||||
)
|
||||
)
|
||||
try:
|
||||
response = await self._genai_client.aio.models.generate_content(
|
||||
model=self.subentry.data.get(CONF_CHAT_MODEL, RECOMMENDED_TTS_MODEL),
|
||||
contents=message,
|
||||
config=config,
|
||||
)
|
||||
|
||||
data = response.candidates[0].content.parts[0].inline_data.data
|
||||
mime_type = response.candidates[0].content.parts[0].inline_data.mime_type
|
||||
except Exception as exc:
|
||||
_LOGGER.warning(
|
||||
"Error during processing of TTS request %s", exc, exc_info=True
|
||||
)
|
||||
except (APIError, ClientError, ValueError) as exc:
|
||||
LOGGER.error("Error during TTS: %s", exc, exc_info=True)
|
||||
raise HomeAssistantError(exc) from exc
|
||||
return "wav", self._convert_to_wav(data, mime_type)
|
||||
|
||||
def _convert_to_wav(self, audio_data: bytes, mime_type: str) -> bytes:
|
||||
"""Generate a WAV file header for the given audio data and parameters.
|
||||
|
||||
Args:
|
||||
audio_data: The raw audio data as a bytes object.
|
||||
mime_type: Mime type of the audio data.
|
||||
|
||||
Returns:
|
||||
A bytes object representing the WAV file header.
|
||||
|
||||
"""
|
||||
parameters = self._parse_audio_mime_type(mime_type)
|
||||
|
||||
wav_buffer = io.BytesIO()
|
||||
with wave.open(wav_buffer, "wb") as wf:
|
||||
wf.setnchannels(1)
|
||||
wf.setsampwidth(parameters["bits_per_sample"] // 8)
|
||||
wf.setframerate(parameters["rate"])
|
||||
wf.writeframes(audio_data)
|
||||
|
||||
return wav_buffer.getvalue()
|
||||
|
||||
def _parse_audio_mime_type(self, mime_type: str) -> dict[str, int]:
|
||||
"""Parse bits per sample and rate from an audio MIME type string.
|
||||
|
||||
Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".
|
||||
|
||||
Args:
|
||||
mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").
|
||||
|
||||
Returns:
|
||||
A dictionary with "bits_per_sample" and "rate" keys. Values will be
|
||||
integers if found, otherwise None.
|
||||
|
||||
"""
|
||||
if not mime_type.startswith("audio/L"):
|
||||
_LOGGER.warning("Received unexpected MIME type %s", mime_type)
|
||||
raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")
|
||||
|
||||
bits_per_sample = 16
|
||||
rate = 24000
|
||||
|
||||
# Extract rate from parameters
|
||||
parts = mime_type.split(";")
|
||||
for param in parts: # Skip the main type part
|
||||
param = param.strip()
|
||||
if param.lower().startswith("rate="):
|
||||
# Handle cases like "rate=" with no value or non-integer value and keep rate as default
|
||||
with suppress(ValueError, IndexError):
|
||||
rate_str = param.split("=", 1)[1]
|
||||
rate = int(rate_str)
|
||||
elif param.startswith("audio/L"):
|
||||
# Keep bits_per_sample as default if conversion fails
|
||||
with suppress(ValueError, IndexError):
|
||||
bits_per_sample = int(param.split("L", 1)[1])
|
||||
|
||||
return {"bits_per_sample": bits_per_sample, "rate": rate}
|
||||
return "wav", convert_to_wav(data, mime_type)
|
||||
|
||||
@@ -27,7 +27,7 @@ from .const import (
    SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED,
)
from .coordinator import GuardianDataUpdateCoordinator
from .services import setup_services
from .services import async_setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -55,7 +55,7 @@ class GuardianData:

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Elexa Guardian component."""
    setup_services(hass)
    async_setup_services(hass)
    return True


@@ -122,8 +122,9 @@ async def async_upgrade_firmware(call: ServiceCall, data: GuardianData) -> None:
    )


def setup_services(hass: HomeAssistant) -> None:
    """Register the Renault services."""
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register the guardian services."""
    for service_name, schema, method in (
        (
            SERVICE_NAME_PAIR_SENSOR,

@@ -9,7 +9,7 @@ ASSETS_URL = "https://habitica-assets.s3.amazonaws.com/mobileApp/images/"
SITE_DATA_URL = "https://habitica.com/user/settings/siteData"
FORGOT_PASSWORD_URL = "https://habitica.com/forgot-password"
SIGN_UP_URL = "https://habitica.com/register"
HABITICANS_URL = "https://habitica.com/static/img/home-main@3x.ffc32b12.png"
HABITICANS_URL = "https://cdn.habitica.com/assets/home-main@3x-Dwnue45Z.png"

DOMAIN = "habitica"

@@ -48,13 +48,13 @@ from homeassistant.components.backup import (
    RestoreBackupStage,
    RestoreBackupState,
    WrittenBackup,
    async_get_manager as async_get_backup_manager,
    suggested_filename as suggested_backup_filename,
    suggested_filename_from_name_date,
)
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
from homeassistant.util.enum import try_parse_enum
@@ -839,7 +839,7 @@ async def backup_addon_before_update(

async def backup_core_before_update(hass: HomeAssistant) -> None:
    """Prepare for updating core."""
    backup_manager = await async_get_backup_manager(hass)
    backup_manager = async_get_backup_manager(hass)
    client = get_supervisor_client(hass)

    try:

@@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType

from . import services
from .const import DOMAIN
from .coordinator import HeosConfigEntry, HeosCoordinator
from .services import async_setup_services

PLATFORMS = [Platform.MEDIA_PLAYER]

@@ -22,7 +22,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the HEOS component."""
    services.register(hass)
    async_setup_services(hass)
    return True

@@ -9,7 +9,7 @@ import voluptuous as vol

from homeassistant.components.media_player import ATTR_MEDIA_VOLUME_LEVEL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import (
    config_validation as cv,
@@ -44,7 +44,8 @@ HEOS_SIGN_IN_SCHEMA = vol.Schema(
HEOS_SIGN_OUT_SCHEMA = vol.Schema({})


def register(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register HEOS services."""
    hass.services.async_register(
        DOMAIN,

@@ -23,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType
from .api import AsyncConfigEntryAuth
from .const import DOMAIN, OLD_NEW_UNIQUE_ID_SUFFIX_MAP
from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator
from .services import register_actions
from .services import async_setup_services

_LOGGER = logging.getLogger(__name__)

@@ -43,7 +43,7 @@ PLATFORMS = [

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Home Connect component."""
    register_actions(hass)
    async_setup_services(hass)
    return True

@@ -18,7 +18,7 @@ from aiohomeconnect.model.error import HomeConnectError
import voluptuous as vol

from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -522,7 +522,8 @@ async def async_service_start_program(call: ServiceCall) -> None:
    await _async_service_program(call, True)


def register_actions(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register custom actions."""

    hass.services.async_register(

@@ -7,7 +7,10 @@ import asyncio
import logging
from typing import Any

from ha_silabs_firmware_client import FirmwareUpdateClient
from aiohttp import ClientError
from ha_silabs_firmware_client import FirmwareUpdateClient, ManifestMissing
from universal_silabs_flasher.common import Version
from universal_silabs_flasher.firmware import NabuCasaMetadata

from homeassistant.components.hassio import (
    AddonError,
@@ -24,6 +27,7 @@ from homeassistant.config_entries import (
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio

@@ -64,6 +68,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
        self.addon_start_task: asyncio.Task | None = None
        self.addon_uninstall_task: asyncio.Task | None = None
        self.firmware_install_task: asyncio.Task | None = None
        self.installing_firmware_name: str | None = None

    def _get_translation_placeholders(self) -> dict[str, str]:
        """Shared translation placeholders."""
@@ -149,15 +154,74 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
        assert self._device is not None

        if not self.firmware_install_task:
            session = async_get_clientsession(self.hass)
            client = FirmwareUpdateClient(fw_update_url, session)
            manifest = await client.async_update_data()
            # Keep track of the firmware we're working with, for error messages
            self.installing_firmware_name = firmware_name

            fw_meta = next(
                fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
            # Installing new firmware is only truly required if the wrong type is
            # installed: upgrading to the latest release of the current firmware type
            # isn't strictly necessary for functionality.
            firmware_install_required = self._probed_firmware_info is None or (
                self._probed_firmware_info.firmware_type
                != expected_installed_firmware_type
            )

            fw_data = await client.async_fetch_firmware(fw_meta)
            session = async_get_clientsession(self.hass)
            client = FirmwareUpdateClient(fw_update_url, session)

            try:
                manifest = await client.async_update_data()
                fw_manifest = next(
                    fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
                )
            except (StopIteration, TimeoutError, ClientError, ManifestMissing):
                _LOGGER.warning(
                    "Failed to fetch firmware update manifest", exc_info=True
                )

                # Not having internet access should not prevent setup
                if not firmware_install_required:
                    _LOGGER.debug(
                        "Skipping firmware upgrade due to index download failure"
                    )
                    return self.async_show_progress_done(next_step_id=next_step_id)

                return self.async_show_progress_done(
                    next_step_id="firmware_download_failed"
                )

            if not firmware_install_required:
                assert self._probed_firmware_info is not None

                # Make sure we do not downgrade the firmware
                fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
                fw_version = fw_metadata.get_public_version()
                probed_fw_version = Version(self._probed_firmware_info.firmware_version)

                if probed_fw_version >= fw_version:
                    _LOGGER.debug(
                        "Not downgrading firmware, installed %s is newer than available %s",
                        probed_fw_version,
                        fw_version,
                    )
                    return self.async_show_progress_done(next_step_id=next_step_id)

            try:
                fw_data = await client.async_fetch_firmware(fw_manifest)
            except (TimeoutError, ClientError, ValueError):
                _LOGGER.warning("Failed to fetch firmware update", exc_info=True)

                # If we cannot download new firmware, we shouldn't block setup
                if not firmware_install_required:
                    _LOGGER.debug(
                        "Skipping firmware upgrade due to image download failure"
                    )
                    return self.async_show_progress_done(next_step_id=next_step_id)

                # Otherwise, fail
                return self.async_show_progress_done(
                    next_step_id="firmware_download_failed"
                )

            self.firmware_install_task = self.hass.async_create_task(
                async_flash_silabs_firmware(
                    hass=self.hass,
@@ -183,8 +247,40 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
                progress_task=self.firmware_install_task,
            )

        try:
            await self.firmware_install_task
        except HomeAssistantError:
            _LOGGER.exception("Failed to flash firmware")
            return self.async_show_progress_done(next_step_id="firmware_install_failed")

        return self.async_show_progress_done(next_step_id=next_step_id)

    async def async_step_firmware_download_failed(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Abort when firmware download failed."""
        assert self.installing_firmware_name is not None
        return self.async_abort(
            reason="fw_download_failed",
            description_placeholders={
                **self._get_translation_placeholders(),
                "firmware_name": self.installing_firmware_name,
            },
        )

    async def async_step_firmware_install_failed(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Abort when firmware install failed."""
        assert self.installing_firmware_name is not None
        return self.async_abort(
            reason="fw_install_failed",
            description_placeholders={
                **self._get_translation_placeholders(),
                "firmware_name": self.installing_firmware_name,
            },
        )

    async def async_step_pick_firmware_zigbee(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@@ -215,6 +311,14 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
            },
        )

    async def async_step_pre_confirm_zigbee(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Pre-confirm Zigbee setup."""

        # This step is necessary to prevent `user_input` from being passed through
        return await self.async_step_confirm_zigbee()

    async def async_step_confirm_zigbee(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@@ -409,7 +513,15 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
        finally:
            self.addon_start_task = None

        return self.async_show_progress_done(next_step_id="confirm_otbr")
        return self.async_show_progress_done(next_step_id="pre_confirm_otbr")

    async def async_step_pre_confirm_otbr(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Pre-confirm OTBR setup."""

        # This step is necessary to prevent `user_input` from being passed through
        return await self.async_step_confirm_otbr()

    async def async_step_confirm_otbr(
        self, user_input: dict[str, Any] | None = None

@@ -36,7 +36,9 @@
        "otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again.",
        "zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.",
        "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.",
        "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device."
        "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device.",
        "fw_download_failed": "{firmware_name} firmware for your {model} failed to download. Make sure Home Assistant has internet access and try again.",
        "fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
      },
      "progress": {
        "install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."

@@ -93,7 +93,7 @@ class SkyConnectFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
            firmware_name="Zigbee",
            expected_installed_firmware_type=ApplicationType.EZSP,
            step_id="install_zigbee_firmware",
            next_step_id="confirm_zigbee",
            next_step_id="pre_confirm_zigbee",
        )

    async def async_step_install_thread_firmware(

@@ -92,7 +92,9 @@
        "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]",
        "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
        "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
        "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]"
        "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]",
        "fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
        "fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
@@ -145,7 +147,9 @@
        "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]",
        "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
        "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
        "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]"
        "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]",
        "fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
        "fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",

@@ -117,7 +117,9 @@
        "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]",
        "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
        "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
        "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or addon is currently trying to communicate with the device."
        "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device.",
        "fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
        "fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",

@@ -9,17 +9,9 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import (
    async_create_clientsession,
    async_get_clientsession,
)
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
    _LOGGER,
    CONF_COOL_AWAY_TEMPERATURE,
    CONF_HEAT_AWAY_TEMPERATURE,
    DOMAIN,
)
from .const import _LOGGER, CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE

UPDATE_LOOP_SLEEP_TIME = 5
PLATFORMS = [Platform.CLIMATE, Platform.HUMIDIFIER, Platform.SENSOR, Platform.SWITCH]
@@ -56,11 +48,11 @@ async def async_setup_entry(
    username = config_entry.data[CONF_USERNAME]
    password = config_entry.data[CONF_PASSWORD]

    if len(hass.config_entries.async_entries(DOMAIN)) > 1:
        session = async_create_clientsession(hass)
    else:
        session = async_get_clientsession(hass)

    # Always create a new session for Honeywell to prevent cookie injection
    # issues. Even with response_url handling in aiosomecomfort 0.0.33+,
    # cookies can still leak into other integrations when using the shared
    # session. See issue #147395.
    session = async_create_clientsession(hass)
    client = aiosomecomfort.AIOSomeComfort(username, password, session=session)
    try:
        await client.login()

@@ -16,7 +16,7 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
    CONF_COOL_AWAY_TEMPERATURE,
@@ -114,10 +114,14 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN):

    async def is_valid(self, **kwargs) -> bool:
        """Check if login credentials are valid."""
        # Always create a new session for Honeywell to prevent cookie injection
        # issues. Even with response_url handling in aiosomecomfort 0.0.33+,
        # cookies can still leak into other integrations when using the shared
        # session. See issue #147395.
        client = aiosomecomfort.AIOSomeComfort(
            kwargs[CONF_USERNAME],
            kwargs[CONF_PASSWORD],
            session=async_get_clientsession(self.hass),
            session=async_create_clientsession(self.hass),
        )

        await client.login()

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/honeywell",
  "iot_class": "cloud_polling",
  "loggers": ["somecomfort"],
  "requirements": ["AIOSomecomfort==0.0.32"]
  "requirements": ["AIOSomecomfort==0.0.33"]
}

@@ -223,7 +223,7 @@ async def async_setup_auth(
        # We first start with a string check to avoid parsing query params
        # for every request.
        elif (
            request.method == "GET"
            request.method in ["GET", "HEAD"]
            and SIGN_QUERY_PARAM in request.query_string
            and async_validate_signed_request(request)
        ):

@@ -90,7 +90,9 @@ class AutomowerButtonEntity(AutomowerAvailableEntity, ButtonEntity):
    @property
    def available(self) -> bool:
        """Return the available attribute of the entity."""
        return self.entity_description.available_fn(self.mower_attributes)
        return super().available and self.entity_description.available_fn(
            self.mower_attributes
        )

    @handle_sending_exception()
    async def async_press(self) -> None:

@@ -2,15 +2,18 @@

from datetime import datetime
import logging
from typing import TYPE_CHECKING

from aioautomower.model import make_name_string

from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util

from . import AutomowerConfigEntry
from .const import DOMAIN
from .coordinator import AutomowerDataUpdateCoordinator
from .entity import AutomowerBaseEntity

@@ -51,6 +54,19 @@ class AutomowerCalendarEntity(AutomowerBaseEntity, CalendarEntity):
        self._attr_unique_id = mower_id
        self._event: CalendarEvent | None = None

    @property
    def device_name(self) -> str:
        """Return the prefix for the event summary."""
        device_registry = dr.async_get(self.hass)
        device_entry = device_registry.async_get_device(
            identifiers={(DOMAIN, self.mower_id)}
        )
        if TYPE_CHECKING:
            assert device_entry is not None
            assert device_entry.name is not None

        return device_entry.name_by_user or device_entry.name

    @property
    def event(self) -> CalendarEvent | None:
        """Return the current or next upcoming event."""
@@ -66,7 +82,7 @@ class AutomowerCalendarEntity(AutomowerBaseEntity, CalendarEntity):
            program_event.work_area_id
        ]
        return CalendarEvent(
            summary=make_name_string(work_area_name, program_event.schedule_no),
            summary=f"{self.device_name} {make_name_string(work_area_name, program_event.schedule_no)}",
            start=program_event.start,
            end=program_event.end,
            rrule=program_event.rrule_str,
@@ -93,7 +109,7 @@ class AutomowerCalendarEntity(AutomowerBaseEntity, CalendarEntity):
            ]
            calendar_events.append(
                CalendarEvent(
                    summary=make_name_string(work_area_name, program_event.schedule_no),
                    summary=f"{self.device_name} {make_name_string(work_area_name, program_event.schedule_no)}",
                    start=program_event.start.replace(tzinfo=start_date.tzinfo),
                    end=program_event.end.replace(tzinfo=start_date.tzinfo),
                    rrule=program_event.rrule_str,

@@ -1,7 +1,19 @@
"""The constants for the Husqvarna Automower integration."""

from aioautomower.model import MowerStates

DOMAIN = "husqvarna_automower"
EXECUTION_TIME_DELAY = 5
NAME = "Husqvarna Automower"
OAUTH2_AUTHORIZE = "https://api.authentication.husqvarnagroup.dev/v1/oauth2/authorize"
OAUTH2_TOKEN = "https://api.authentication.husqvarnagroup.dev/v1/oauth2/token"

ERROR_STATES = [
    MowerStates.ERROR_AT_POWER_UP,
    MowerStates.ERROR,
    MowerStates.FATAL_ERROR,
    MowerStates.OFF,
    MowerStates.STOPPED,
    MowerStates.WAIT_POWER_UP,
    MowerStates.WAIT_UPDATING,
]

@@ -18,7 +18,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AutomowerConfigEntry
from .const import DOMAIN
from .const import DOMAIN, ERROR_STATES
from .coordinator import AutomowerDataUpdateCoordinator
from .entity import AutomowerAvailableEntity, handle_sending_exception

@@ -108,18 +108,28 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity):
    def activity(self) -> LawnMowerActivity:
        """Return the state of the mower."""
        mower_attributes = self.mower_attributes
        if mower_attributes.mower.state in ERROR_STATES:
            return LawnMowerActivity.ERROR
        if mower_attributes.mower.state in PAUSED_STATES:
            return LawnMowerActivity.PAUSED
        if (mower_attributes.mower.state == "RESTRICTED") or (
            mower_attributes.mower.activity in DOCKED_ACTIVITIES
        if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
            return LawnMowerActivity.RETURNING
        if (
            mower_attributes.mower.state is MowerStates.RESTRICTED
            or mower_attributes.mower.activity in DOCKED_ACTIVITIES
        ):
            return LawnMowerActivity.DOCKED
        if mower_attributes.mower.state in MowerStates.IN_OPERATION:
            if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
                return LawnMowerActivity.RETURNING
            return LawnMowerActivity.MOWING
        return LawnMowerActivity.ERROR

    @property
    def available(self) -> bool:
        """Return the available attribute of the entity."""
        return (
            super().available and self.mower_attributes.mower.state != MowerStates.OFF
        )

    @property
    def work_areas(self) -> dict[int, WorkArea] | None:
        """Return the work areas of the mower."""

@@ -8,5 +8,5 @@
  "iot_class": "cloud_push",
  "loggers": ["aioautomower"],
  "quality_scale": "silver",
  "requirements": ["aioautomower==2025.6.0"]
  "requirements": ["aioautomower==1.0.1"]
}

@@ -7,13 +7,7 @@ import logging
from operator import attrgetter
from typing import TYPE_CHECKING, Any

from aioautomower.model import (
    MowerAttributes,
    MowerModes,
    MowerStates,
    RestrictedReasons,
    WorkArea,
)
from aioautomower.model import MowerAttributes, MowerModes, RestrictedReasons, WorkArea

from homeassistant.components.sensor import (
    SensorDeviceClass,
@@ -27,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from . import AutomowerConfigEntry
from .const import ERROR_STATES
from .coordinator import AutomowerDataUpdateCoordinator
from .entity import (
    AutomowerBaseEntity,
@@ -166,15 +161,6 @@ ERROR_KEYS = [
    "zone_generator_problem",
]

ERROR_STATES = [
    MowerStates.ERROR_AT_POWER_UP,
    MowerStates.ERROR,
    MowerStates.FATAL_ERROR,
    MowerStates.OFF,
    MowerStates.STOPPED,
    MowerStates.WAIT_POWER_UP,
    MowerStates.WAIT_UPDATING,
]

ERROR_KEY_LIST = list(
    dict.fromkeys(ERROR_KEYS + [state.lower() for state in ERROR_STATES])

@@ -288,8 +288,10 @@ class ImageView(HomeAssistantView):
        """Initialize an image view."""
        self.component = component

    async def get(self, request: web.Request, entity_id: str) -> web.StreamResponse:
        """Start a GET request."""
    async def _authenticate_request(
        self, request: web.Request, entity_id: str
    ) -> ImageEntity:
        """Authenticate request and return image entity."""
        if (image_entity := self.component.get_entity(entity_id)) is None:
            raise web.HTTPNotFound

@@ -306,6 +308,31 @@ class ImageView(HomeAssistantView):
            # Invalid sigAuth or image entity access token
            raise web.HTTPForbidden

        return image_entity

    async def head(self, request: web.Request, entity_id: str) -> web.Response:
        """Start a HEAD request.

        This is sent by some DLNA renderers, like Samsung ones, prior to sending
        the GET request.
        """
        image_entity = await self._authenticate_request(request, entity_id)

        # Don't use `handle` as we don't care about the stream case, we only want
        # to verify that the image exists.
        try:
            image = await _async_get_image(image_entity, IMAGE_TIMEOUT)
        except (HomeAssistantError, ValueError) as ex:
            raise web.HTTPInternalServerError from ex

        return web.Response(
            content_type=image.content_type,
            headers={"Content-Length": str(len(image.content))},
        )

    async def get(self, request: web.Request, entity_id: str) -> web.StreamResponse:
        """Start a GET request."""
        image_entity = await self._authenticate_request(request, entity_id)
        return await self.handle(request, image_entity)

    async def handle(
@@ -317,7 +344,11 @@ class ImageView(HomeAssistantView):
        except (HomeAssistantError, ValueError) as ex:
            raise web.HTTPInternalServerError from ex

        return web.Response(body=image.content, content_type=image.content_type)
        return web.Response(
            body=image.content,
            content_type=image.content_type,
            headers={"Content-Length": str(len(image.content))},
        )


async def async_get_still_stream(

@@ -10,4 +10,8 @@ OHM = "Ω"
DISCOVERY_SVC_UUID = "9eae1000-9d0d-48c5-aa55-33e27f9bc533"

MAX_TEMP: int = 450
MAX_TEMP_F: int = 850
MIN_TEMP: int = 10
MIN_TEMP_F: int = 50
MIN_BOOST_TEMP: int = 250
MIN_BOOST_TEMP_F: int = 480

@@ -168,7 +168,9 @@ class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]):

        if self.device.is_connected and characteristics:
            try:
                return await self.device.get_settings(list(characteristics))
                return await self.device.get_settings(
                    list(characteristics | {CharSetting.TEMP_UNIT})
                )
            except CommunicationError as e:
                _LOGGER.debug("Failed to fetch settings", exc_info=e)

@@ -14,5 +14,5 @@
  "iot_class": "local_polling",
  "loggers": ["pynecil"],
  "quality_scale": "platinum",
  "requirements": ["pynecil==4.1.0"]
  "requirements": ["pynecil==4.1.1"]
}

@@ -6,10 +6,9 @@ from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum

from pynecil import CharSetting, LiveDataResponse, SettingsDataResponse
from pynecil import CharSetting, LiveDataResponse, SettingsDataResponse, TempUnit

from homeassistant.components.number import (
    DEFAULT_MAX_VALUE,
    NumberDeviceClass,
    NumberEntity,
    NumberEntityDescription,
@@ -24,9 +23,17 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.unit_conversion import TemperatureConverter

from . import IronOSConfigEntry
from .const import MAX_TEMP, MIN_TEMP
from .const import (
    MAX_TEMP,
    MAX_TEMP_F,
    MIN_BOOST_TEMP,
    MIN_BOOST_TEMP_F,
    MIN_TEMP,
    MIN_TEMP_F,
)
from .coordinator import IronOSCoordinators
from .entity import IronOSBaseEntity

@@ -38,9 +45,10 @@ class IronOSNumberEntityDescription(NumberEntityDescription):
    """Describes IronOS number entity."""

    value_fn: Callable[[LiveDataResponse, SettingsDataResponse], float | int | None]
    max_value_fn: Callable[[LiveDataResponse], float | int] | None = None
    characteristic: CharSetting
    raw_value_fn: Callable[[float], float | int] | None = None
    native_max_value_f: float | None = None
    native_min_value_f: float | None = None


class PinecilNumber(StrEnum):
@@ -74,44 +82,6 @@ def multiply(value: float | None, multiplier: float) -> float | None:


PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
    IronOSNumberEntityDescription(
        key=PinecilNumber.SETPOINT_TEMP,
        translation_key=PinecilNumber.SETPOINT_TEMP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
        value_fn=lambda data, _: data.setpoint_temp,
        characteristic=CharSetting.SETPOINT_TEMP,
        mode=NumberMode.BOX,
        native_min_value=MIN_TEMP,
        native_step=5,
        max_value_fn=lambda data: min(data.max_tip_temp_ability or MAX_TEMP, MAX_TEMP),
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.SLEEP_TEMP,
        translation_key=PinecilNumber.SLEEP_TEMP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
        value_fn=lambda _, settings: settings.get("sleep_temp"),
        characteristic=CharSetting.SLEEP_TEMP,
        mode=NumberMode.BOX,
        native_min_value=MIN_TEMP,
        native_max_value=MAX_TEMP,
        native_step=10,
        entity_category=EntityCategory.CONFIG,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.BOOST_TEMP,
        translation_key=PinecilNumber.BOOST_TEMP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
        value_fn=lambda _, settings: settings.get("boost_temp"),
        characteristic=CharSetting.BOOST_TEMP,
        mode=NumberMode.BOX,
        native_min_value=0,
        native_max_value=MAX_TEMP,
        native_step=10,
        entity_category=EntityCategory.CONFIG,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.QC_MAX_VOLTAGE,
        translation_key=PinecilNumber.QC_MAX_VOLTAGE,
@@ -296,32 +266,6 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.TEMP_INCREMENT_SHORT,
        translation_key=PinecilNumber.TEMP_INCREMENT_SHORT,
        value_fn=(lambda _, settings: settings.get("temp_increment_short")),
        characteristic=CharSetting.TEMP_INCREMENT_SHORT,
        raw_value_fn=lambda value: value,
        mode=NumberMode.BOX,
        native_min_value=1,
        native_max_value=50,
        native_step=1,
        entity_category=EntityCategory.CONFIG,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.TEMP_INCREMENT_LONG,
        translation_key=PinecilNumber.TEMP_INCREMENT_LONG,
        value_fn=(lambda _, settings: settings.get("temp_increment_long")),
        characteristic=CharSetting.TEMP_INCREMENT_LONG,
        raw_value_fn=lambda value: value,
        mode=NumberMode.BOX,
        native_min_value=5,
        native_max_value=90,
        native_step=5,
        entity_category=EntityCategory.CONFIG,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
    ),
)

PINECIL_NUMBER_DESCRIPTIONS_V223: tuple[IronOSNumberEntityDescription, ...] = (
@@ -341,6 +285,82 @@ PINECIL_NUMBER_DESCRIPTIONS_V223: tuple[IronOSNumberEntityDescription, ...] = (
    ),
)

"""
The `device_class` attribute was removed from the `setpoint_temperature`, `sleep_temperature`, and `boost_temp` entities.
These entities represent user-defined input values, not measured temperatures, and their
interpretation depends on the device's current unit configuration. Applying a device_class
results in automatic unit conversions, which introduce rounding errors due to the use of integers.
This can prevent the correct value from being set, as the input is modified during synchronization with the device.
"""
PINECIL_TEMP_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
    IronOSNumberEntityDescription(
        key=PinecilNumber.SLEEP_TEMP,
        translation_key=PinecilNumber.SLEEP_TEMP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda _, settings: settings.get("sleep_temp"),
        characteristic=CharSetting.SLEEP_TEMP,
        mode=NumberMode.BOX,
        native_min_value=MIN_TEMP,
        native_max_value=MAX_TEMP,
        native_min_value_f=MIN_TEMP_F,
        native_max_value_f=MAX_TEMP_F,
        native_step=10,
        entity_category=EntityCategory.CONFIG,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.BOOST_TEMP,
        translation_key=PinecilNumber.BOOST_TEMP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda _, settings: settings.get("boost_temp"),
        characteristic=CharSetting.BOOST_TEMP,
        mode=NumberMode.BOX,
        native_min_value=MIN_BOOST_TEMP,
        native_min_value_f=MIN_BOOST_TEMP_F,
        native_max_value=MAX_TEMP,
        native_max_value_f=MAX_TEMP_F,
        native_step=10,
        entity_category=EntityCategory.CONFIG,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.TEMP_INCREMENT_SHORT,
        translation_key=PinecilNumber.TEMP_INCREMENT_SHORT,
        value_fn=(lambda _, settings: settings.get("temp_increment_short")),
        characteristic=CharSetting.TEMP_INCREMENT_SHORT,
        raw_value_fn=lambda value: value,
        mode=NumberMode.BOX,
        native_min_value=1,
        native_max_value=50,
        native_step=1,
        entity_category=EntityCategory.CONFIG,
    ),
    IronOSNumberEntityDescription(
        key=PinecilNumber.TEMP_INCREMENT_LONG,
        translation_key=PinecilNumber.TEMP_INCREMENT_LONG,
        value_fn=(lambda _, settings: settings.get("temp_increment_long")),
        characteristic=CharSetting.TEMP_INCREMENT_LONG,
        raw_value_fn=lambda value: value,
        mode=NumberMode.BOX,
        native_min_value=5,
        native_max_value=90,
        native_step=5,
        entity_category=EntityCategory.CONFIG,
    ),
)

PINECIL_SETPOINT_NUMBER_DESCRIPTION = IronOSNumberEntityDescription(
    key=PinecilNumber.SETPOINT_TEMP,
    translation_key=PinecilNumber.SETPOINT_TEMP,
    native_unit_of_measurement=UnitOfTemperature.CELSIUS,
    value_fn=lambda data, _: data.setpoint_temp,
    characteristic=CharSetting.SETPOINT_TEMP,
    mode=NumberMode.BOX,
    native_min_value=MIN_TEMP,
    native_max_value=MAX_TEMP,
    native_min_value_f=MIN_TEMP_F,
    native_max_value_f=MAX_TEMP_F,
    native_step=5,
)

async def async_setup_entry(
    hass: HomeAssistant,
@@ -354,9 +374,18 @@ async def async_setup_entry(
    if coordinators.live_data.v223_features:
        descriptions += PINECIL_NUMBER_DESCRIPTIONS_V223

    async_add_entities(
    entities = [
        IronOSNumberEntity(coordinators, description) for description in descriptions
    ]

    entities.extend(
        IronOSTemperatureNumberEntity(coordinators, description)
        for description in PINECIL_TEMP_NUMBER_DESCRIPTIONS
    )
    entities.append(
        IronOSSetpointNumberEntity(coordinators, PINECIL_SETPOINT_NUMBER_DESCRIPTION)
    )
    async_add_entities(entities)


class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
@@ -388,15 +417,6 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
            self.coordinator.data, self.settings.data
        )

    @property
    def native_max_value(self) -> float:
        """Return sensor state."""

        if self.entity_description.max_value_fn is not None:
            return self.entity_description.max_value_fn(self.coordinator.data)

        return self.entity_description.native_max_value or DEFAULT_MAX_VALUE

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""

@@ -407,3 +427,60 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
            )
        )
        await self.settings.async_request_refresh()


class IronOSTemperatureNumberEntity(IronOSNumberEntity):
    """Implementation of a IronOS temperature number entity."""

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of the sensor, if any."""

        return (
            UnitOfTemperature.FAHRENHEIT
            if self.settings.data.get("temp_unit") is TempUnit.FAHRENHEIT
            else UnitOfTemperature.CELSIUS
        )

    @property
    def native_min_value(self) -> float:
        """Return the minimum value."""

        return (
            self.entity_description.native_min_value_f
            if self.entity_description.native_min_value_f
            and self.native_unit_of_measurement is UnitOfTemperature.FAHRENHEIT
            else super().native_min_value
        )

    @property
    def native_max_value(self) -> float:
        """Return the maximum value."""

        return (
            self.entity_description.native_max_value_f
            if self.entity_description.native_max_value_f
            and self.native_unit_of_measurement is UnitOfTemperature.FAHRENHEIT
            else super().native_max_value
        )


class IronOSSetpointNumberEntity(IronOSTemperatureNumberEntity):
    """IronOS setpoint temperature entity."""

    @property
    def native_max_value(self) -> float:
        """Return the maximum value."""

        return (
            min(
                TemperatureConverter.convert(
                    float(max_tip_c),
                    UnitOfTemperature.CELSIUS,
                    self.native_unit_of_measurement,
                ),
                super().native_max_value,
            )
            if (max_tip_c := self.coordinator.data.max_tip_temp_ability) is not None
            else super().native_max_value
        )

@@ -108,22 +108,22 @@ def get_statistics(
    if monthly_consumptions := get_consumptions(data, value_type):
        return [
            {
                "value": as_number(
                    get_values_by_type(
                        consumptions=consumptions,
                        consumption_type=consumption_type,
                    ).get(
                        "additionalValue"
                        if value_type == IstaValueType.ENERGY
                        else "value"
                    )
                ),
                "value": as_number(value),
                "date": consumptions["date"],
            }
            for consumptions in monthly_consumptions
            if get_values_by_type(
                consumptions=consumptions,
                consumption_type=consumption_type,
            ).get("additionalValue" if value_type == IstaValueType.ENERGY else "value")
            if (
                value := (
                    consumption := get_values_by_type(
                        consumptions=consumptions,
                        consumption_type=consumption_type,
                    )
                ).get(
                    "additionalValue"
                    if value_type == IstaValueType.ENERGY
                    and consumption.get("additionalValue") is not None
                    else "value"
                )
            )
        ]
    return None

@@ -66,8 +66,7 @@ def _connect_to_address(
) -> dict[str, Any]:
    """Connect to the Jellyfin server."""
    result: dict[str, Any] = connection_manager.connect_to_address(url)

    if result["State"] != CONNECTION_STATE["ServerSignIn"]:
    if CONNECTION_STATE(result["State"]) != CONNECTION_STATE.ServerSignIn:
        raise CannotConnect

    return result

@@ -54,6 +54,9 @@ class JellyfinDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, An
            self.api_client.jellyfin.sessions
        )

        if sessions is None:
            return {}

        sessions_by_id: dict[str, dict[str, Any]] = {
            session["Id"]: session
            for session in sessions

@@ -7,6 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "loggers": ["jellyfin_apiclient_python"],
  "requirements": ["jellyfin-apiclient-python==1.10.0"],
  "single_config_entry": true
  "requirements": ["jellyfin-apiclient-python==1.11.0"]
}

@@ -91,7 +91,7 @@ from .schema import (
    TimeSchema,
    WeatherSchema,
)
from .services import register_knx_services
from .services import async_setup_services
from .storage.config_store import STORAGE_KEY as CONFIG_STORAGE_KEY, KNXConfigStore
from .telegrams import STORAGE_KEY as TELEGRAMS_STORAGE_KEY, Telegrams
from .websocket import register_panel
@@ -138,7 +138,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    if (conf := config.get(DOMAIN)) is not None:
        hass.data[_KNX_YAML_CONFIG] = dict(conf)

    register_knx_services(hass)
    async_setup_services(hass)
    return True


@@ -41,7 +41,7 @@ _LOGGER = logging.getLogger(__name__)


@callback
def register_knx_services(hass: HomeAssistant) -> None:
def async_setup_services(hass: HomeAssistant) -> None:
    """Register KNX integration services."""
    hass.services.async_register(
        DOMAIN,

@@ -14,6 +14,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import CONF_SERVICE_CODE
from .coordinator import PlenticoreConfigEntry, SettingDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
@@ -29,6 +30,7 @@ class PlenticoreSwitchEntityDescription(SwitchEntityDescription):
    on_label: str
    off_value: str
    off_label: str
    installer_required: bool = False


SWITCH_SETTINGS_DATA = [
@@ -42,6 +44,17 @@ SWITCH_SETTINGS_DATA = [
        off_value="2",
        off_label="Automatic economical",
    ),
    PlenticoreSwitchEntityDescription(
        module_id="devices:local",
        key="Battery:ManualCharge",
        name="Battery Manual Charge",
        is_on="1",
        on_value="1",
        on_label="On",
        off_value="0",
        off_label="Off",
        installer_required=True,
    ),
]


@@ -73,7 +86,13 @@ async def async_setup_entry(
                description.key,
            )
            continue

        if entry.data.get(CONF_SERVICE_CODE) is None and description.installer_required:
            _LOGGER.debug(
                "Skipping installer required setting data %s/%s",
                description.module_id,
                description.key,
            )
            continue
        entities.append(
            PlenticoreDataSwitch(
                settings_data_update_coordinator,

@@ -2,8 +2,10 @@

from collections.abc import Callable
from dataclasses import dataclass
from typing import cast

from pylamarzocco.const import FirmwareType
from pylamarzocco.const import FirmwareType, MachineState, WidgetType
from pylamarzocco.models import MachineStatus

from homeassistant.const import CONF_ADDRESS, CONF_MAC
from homeassistant.helpers.device_registry import (
@@ -32,6 +34,7 @@ class LaMarzoccoBaseEntity(
    """Common elements for all entities."""

    _attr_has_entity_name = True
    _unavailable_when_machine_off = True

    def __init__(
        self,
@@ -63,6 +66,21 @@ class LaMarzoccoBaseEntity(
        if connections:
            self._attr_device_info.update(DeviceInfo(connections=connections))

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        machine_state = (
            cast(
                MachineStatus,
                self.coordinator.device.dashboard.config[WidgetType.CM_MACHINE_STATUS],
            ).status
            if WidgetType.CM_MACHINE_STATUS in self.coordinator.device.dashboard.config
            else MachineState.OFF
        )
        return super().available and not (
            self._unavailable_when_machine_off and machine_state is MachineState.OFF
        )


class LaMarzoccoEntity(LaMarzoccoBaseEntity):
    """Common elements for all entities."""

@@ -58,10 +58,6 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
                CoffeeBoiler, machine.dashboard.config[WidgetType.CM_COFFEE_BOILER]
            ).target_temperature
        ),
        available_fn=(
            lambda coordinator: WidgetType.CM_COFFEE_BOILER
            in coordinator.device.dashboard.config
        ),
    ),
    LaMarzoccoNumberEntityDescription(
        key="smart_standby_time",

@@ -57,10 +57,6 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
            ).ready_start_time
        ),
        entity_category=EntityCategory.DIAGNOSTIC,
        available_fn=(
            lambda coordinator: WidgetType.CM_COFFEE_BOILER
            in coordinator.device.dashboard.config
        ),
    ),
    LaMarzoccoSensorEntityDescription(
        key="steam_boiler_ready_time",
@@ -188,6 +184,8 @@ class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity):
class LaMarzoccoStatisticSensorEntity(LaMarzoccoSensorEntity):
    """Sensor for La Marzocco statistics."""

    _unavailable_when_machine_off = False

    @property
    def native_value(self) -> StateType | datetime | None:
        """Return the value of the sensor."""

@@ -42,5 +42,5 @@ class LaMetricUpdate(LaMetricEntity, UpdateEntity):
    def latest_version(self) -> str | None:
        """Return the latest version of the entity."""
        if not self.coordinator.data.update:
            return None
        return self.coordinator.data.os_version
        return self.coordinator.data.update.version

@@ -9,5 +9,5 @@
  "iot_class": "local_push",
  "loggers": ["pypck"],
  "quality_scale": "bronze",
  "requirements": ["pypck==0.8.9", "lcn-frontend==0.2.5"]
  "requirements": ["pypck==0.8.10", "lcn-frontend==0.2.5"]
}

@@ -780,10 +780,10 @@
    "battery_level": {
      "name": "Battery",
      "state": {
        "high": "Full",
        "high": "[%key:common::state::full%]",
        "mid": "[%key:common::state::medium%]",
        "low": "[%key:common::state::low%]",
        "warning": "Empty"
        "warning": "[%key:common::state::empty%]"
      }
    },
    "relative_to_start": {