mirror of https://github.com/home-assistant/core.git
synced 2026-01-07 15:48:16 +00:00

Compare commits: hassio-spl...copilot-in (1 commit)

Commit: b8cd4193a9
.github/copilot-instructions.md (vendored, 1 line changed)
@@ -94,6 +94,7 @@ automation application.
- Provide descriptive state attributes
- Testing:
  - Test location: `tests/components/{domain}/`
  - Run tests with `pytest --no-header --no-summary -x <location>`
  - Use pytest fixtures from `tests.common`
  - Mock external dependencies
  - Use snapshots for complex data

.github/workflows/builder.yml (vendored, 2 lines changed)
@@ -324,7 +324,7 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Install Cosign
        uses: sigstore/cosign-installer@v3.9.1
        uses: sigstore/cosign-installer@v3.8.2
        with:
          cosign-release: "v2.2.3"

@@ -67,7 +67,6 @@ homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alexa_devices.*
homeassistant.components.alpha_vantage.*
homeassistant.components.altruist.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
@@ -503,7 +502,6 @@ homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.telegram_bot.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*

CODEOWNERS (generated, 14 lines changed)
@@ -93,8 +93,6 @@ build.json @home-assistant/supervisor
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/alexa_devices/ @chemelli74
/tests/components/alexa_devices/ @chemelli74
/homeassistant/components/altruist/ @airalab @LoSk-p
/tests/components/altruist/ @airalab @LoSk-p
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
@@ -331,8 +329,8 @@ build.json @home-assistant/supervisor
/tests/components/demo/ @home-assistant/core
/homeassistant/components/denonavr/ @ol-iver @starkillerOG
/tests/components/denonavr/ @ol-iver @starkillerOG
/homeassistant/components/derivative/ @afaucogney @karwosts
/tests/components/derivative/ @afaucogney @karwosts
/homeassistant/components/derivative/ @afaucogney
/tests/components/derivative/ @afaucogney
/homeassistant/components/devialet/ @fwestenberg
/tests/components/devialet/ @fwestenberg
/homeassistant/components/device_automation/ @home-assistant/core
@@ -788,6 +786,8 @@ build.json @home-assistant/supervisor
/tests/components/jellyfin/ @RunC0deRun @ctalkington
/homeassistant/components/jewish_calendar/ @tsvi
/tests/components/jewish_calendar/ @tsvi
/homeassistant/components/juicenet/ @jesserockz
/tests/components/juicenet/ @jesserockz
/homeassistant/components/justnimbus/ @kvanzuijlen
/tests/components/justnimbus/ @kvanzuijlen
/homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
@@ -1169,8 +1169,6 @@ build.json @home-assistant/supervisor
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
/tests/components/plaato/ @JohNan
/homeassistant/components/playstation_network/ @jackjpowell
/tests/components/playstation_network/ @jackjpowell
/homeassistant/components/plex/ @jjlawren
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
@@ -1582,8 +1580,6 @@ build.json @home-assistant/supervisor
/tests/components/tile/ @bachya
/homeassistant/components/tilt_ble/ @apt-itude
/tests/components/tilt_ble/ @apt-itude
/homeassistant/components/tilt_pi/ @michaelheyman
/tests/components/tilt_pi/ @michaelheyman
/homeassistant/components/time/ @home-assistant/core
/tests/components/time/ @home-assistant/core
/homeassistant/components/time_date/ @fabaff
@@ -1672,8 +1668,6 @@ build.json @home-assistant/supervisor
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
/homeassistant/components/vegehub/ @ghowevege
/tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio

@@ -1,11 +1,5 @@
{
  "domain": "sony",
  "name": "Sony",
  "integrations": [
    "braviatv",
    "ps4",
    "sony_projector",
    "songpal",
    "playstation_network"
  ]
  "integrations": ["braviatv", "ps4", "sony_projector", "songpal"]
}

@@ -1,6 +1,5 @@
{
  "domain": "switchbot",
  "name": "SwitchBot",
  "integrations": ["switchbot", "switchbot_cloud"],
  "iot_standards": ["matter"]
  "integrations": ["switchbot", "switchbot_cloud"]
}

@@ -1,5 +0,0 @@
{
  "domain": "tilt",
  "name": "Tilt",
  "integrations": ["tilt_ble", "tilt_pi"]
}
@@ -5,7 +5,6 @@ import logging
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import (
    HassJobType,
    HomeAssistant,
@@ -18,26 +17,18 @@ from homeassistant.helpers import config_validation as cv, storage
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

from .const import (
    ATTR_INSTRUCTIONS,
    ATTR_TASK_NAME,
    DATA_COMPONENT,
    DATA_PREFERENCES,
    DOMAIN,
    SERVICE_GENERATE_DATA,
    AITaskEntityFeature,
)
from .const import DATA_COMPONENT, DATA_PREFERENCES, DOMAIN, AITaskEntityFeature
from .entity import AITaskEntity
from .http import async_setup as async_setup_http
from .task import GenDataTask, GenDataTaskResult, async_generate_data
from .http import async_setup as async_setup_conversation_http
from .task import GenTextTask, GenTextTaskResult, async_generate_text

__all__ = [
    "DOMAIN",
    "AITaskEntity",
    "AITaskEntityFeature",
    "GenDataTask",
    "GenDataTaskResult",
    "async_generate_data",
    "GenTextTask",
    "GenTextTaskResult",
    "async_generate_text",
    "async_setup",
    "async_setup_entry",
    "async_unload_entry",
@@ -54,16 +45,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    hass.data[DATA_COMPONENT] = entity_component
    hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
    await hass.data[DATA_PREFERENCES].async_load()
    async_setup_http(hass)
    async_setup_conversation_http(hass)
    hass.services.async_register(
        DOMAIN,
        SERVICE_GENERATE_DATA,
        async_service_generate_data,
        "generate_text",
        async_service_generate_text,
        schema=vol.Schema(
            {
                vol.Required(ATTR_TASK_NAME): cv.string,
                vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
                vol.Required(ATTR_INSTRUCTIONS): cv.string,
                vol.Required("task_name"): cv.string,
                vol.Optional("entity_id"): cv.entity_id,
                vol.Required("instructions"): cv.string,
            }
        ),
        supports_response=SupportsResponse.ONLY,
@@ -82,18 +73,18 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return await hass.data[DATA_COMPONENT].async_unload_entry(entry)


async def async_service_generate_data(call: ServiceCall) -> ServiceResponse:
async def async_service_generate_text(call: ServiceCall) -> ServiceResponse:
    """Run the run task service."""
    result = await async_generate_data(hass=call.hass, **call.data)
    return result.as_dict()
    result = await async_generate_text(hass=call.hass, **call.data)
    return result.as_dict()  # type: ignore[return-value]


class AITaskPreferences:
    """AI Task preferences."""

    KEYS = ("gen_data_entity_id",)
    KEYS = ("gen_text_entity_id",)

    gen_data_entity_id: str | None = None
    gen_text_entity_id: str | None = None

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the preferences."""
@@ -113,11 +104,11 @@ class AITaskPreferences:
    def async_set_preferences(
        self,
        *,
        gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
        gen_text_entity_id: str | None | UndefinedType = UNDEFINED,
    ) -> None:
        """Set the preferences."""
        changed = False
        for key, value in (("gen_data_entity_id", gen_data_entity_id),):
        for key, value in (("gen_text_entity_id", gen_text_entity_id),):
            if value is not UNDEFINED:
                if getattr(self, key) != value:
                    setattr(self, key, value)

@@ -3,7 +3,7 @@
from __future__ import annotations

from enum import IntFlag
from typing import TYPE_CHECKING, Final
from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey

@@ -17,11 +17,6 @@ DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")

SERVICE_GENERATE_DATA = "generate_data"

ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"

DEFAULT_SYSTEM_PROMPT = (
    "You are a Home Assistant expert and help users with their tasks."
)
@@ -30,5 +25,5 @@ DEFAULT_SYSTEM_PROMPT = (
class AITaskEntityFeature(IntFlag):
    """Supported features of the AI task entity."""

    GENERATE_DATA = 1
    """Generate data based on instructions."""
    GENERATE_TEXT = 1
    """Generate text based on instructions."""

@@ -18,7 +18,7 @@ from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature
from .task import GenDataTask, GenDataTaskResult
from .task import GenTextTask, GenTextTaskResult


class AITaskEntity(RestoreEntity):
@@ -56,7 +56,7 @@ class AITaskEntity(RestoreEntity):
    @contextlib.asynccontextmanager
    async def _async_get_ai_task_chat_log(
        self,
        task: GenDataTask,
        task: GenTextTask,
    ) -> AsyncGenerator[ChatLog]:
        """Context manager used to manage the ChatLog used during an AI Task."""
        # pylint: disable-next=contextmanager-generator-missing-cleanup
@@ -84,20 +84,20 @@ class AITaskEntity(RestoreEntity):
            yield chat_log

    @final
    async def internal_async_generate_data(
    async def internal_async_generate_text(
        self,
        task: GenDataTask,
    ) -> GenDataTaskResult:
        """Run a gen data task."""
        task: GenTextTask,
    ) -> GenTextTaskResult:
        """Run a gen text task."""
        self.__last_activity = dt_util.utcnow().isoformat()
        self.async_write_ha_state()
        async with self._async_get_ai_task_chat_log(task) as chat_log:
            return await self._async_generate_data(task, chat_log)
            return await self._async_generate_text(task, chat_log)

    async def _async_generate_data(
    async def _async_generate_text(
        self,
        task: GenDataTask,
        task: GenTextTask,
        chat_log: ChatLog,
    ) -> GenDataTaskResult:
        """Handle a gen data task."""
    ) -> GenTextTaskResult:
        """Handle a gen text task."""
        raise NotImplementedError

@@ -36,7 +36,7 @@ def websocket_get_preferences(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/preferences/set",
        vol.Optional("gen_data_entity_id"): vol.Any(str, None),
        vol.Optional("gen_text_entity_id"): vol.Any(str, None),
    }
)
@websocket_api.require_admin

@@ -1,6 +1,6 @@
{
  "services": {
    "generate_data": {
    "generate_text": {
      "service": "mdi:file-star-four-points-outline"
    }
  }

@@ -1,4 +1,4 @@
generate_data:
generate_text:
  fields:
    task_name:
      example: "home summary"
@@ -6,7 +6,7 @@ generate_data:
      selector:
        text:
    instructions:
      example: "Generate a funny notification that the garage door was left open"
      example: "Generate a funny notification that garage door was left open"
      required: true
      selector:
        text:
@@ -16,4 +16,4 @@ generate_data:
        entity:
          domain: ai_task
          supported_features:
            - ai_task.AITaskEntityFeature.GENERATE_DATA
            - ai_task.AITaskEntityFeature.GENERATE_TEXT

@@ -1,11 +1,11 @@
{
  "services": {
    "generate_data": {
      "name": "Generate data",
      "description": "Uses AI to run a task that generates data.",
    "generate_text": {
      "name": "Generate text",
      "description": "Use AI to run a task that generates text.",
      "fields": {
        "task_name": {
          "name": "Task name",
          "name": "Task Name",
          "description": "Name of the task."
        },
        "instructions": {

@@ -3,39 +3,35 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


async def async_generate_data(
async def async_generate_text(
    hass: HomeAssistant,
    *,
    task_name: str,
    entity_id: str | None = None,
    instructions: str,
) -> GenDataTaskResult:
) -> GenTextTaskResult:
    """Run a task in the AI Task integration."""
    if entity_id is None:
        entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id
        entity_id = hass.data[DATA_PREFERENCES].gen_text_entity_id

    if entity_id is None:
        raise HomeAssistantError("No entity_id provided and no preferred entity set")
        raise ValueError("No entity_id provided and no preferred entity set")

    entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
    if entity is None:
        raise HomeAssistantError(f"AI Task entity {entity_id} not found")
        raise ValueError(f"AI Task entity {entity_id} not found")

    if AITaskEntityFeature.GENERATE_DATA not in entity.supported_features:
        raise HomeAssistantError(
            f"AI Task entity {entity_id} does not support generating data"
        )
    if AITaskEntityFeature.GENERATE_TEXT not in entity.supported_features:
        raise ValueError(f"AI Task entity {entity_id} does not support generating text")

    return await entity.internal_async_generate_data(
        GenDataTask(
    return await entity.internal_async_generate_text(
        GenTextTask(
            name=task_name,
            instructions=instructions,
        )
@@ -43,8 +39,8 @@ async def async_generate_data(


@dataclass(slots=True)
class GenDataTask:
    """Gen data task to be processed."""
class GenTextTask:
    """Gen text task to be processed."""

    name: str
    """Name of the task."""
@@ -54,22 +50,22 @@ class GenDataTask:

    def __str__(self) -> str:
        """Return task as a string."""
        return f"<GenDataTask {self.name}: {id(self)}>"
        return f"<GenTextTask {self.name}: {id(self)}>"


@dataclass(slots=True)
class GenDataTaskResult:
    """Result of gen data task."""
class GenTextTaskResult:
    """Result of gen text task."""

    conversation_id: str
    """Unique identifier for the conversation."""

    data: Any
    """Data generated by the task."""
    text: str
    """Generated text."""

    def as_dict(self) -> dict[str, Any]:
    def as_dict(self) -> dict[str, str]:
        """Return result as a dict."""
        return {
            "conversation_id": self.conversation_id,
            "data": self.data,
            "text": self.text,
        }

@@ -8,7 +8,6 @@ from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.NOTIFY,
    Platform.SENSOR,
    Platform.SWITCH,
]

@@ -1,88 +0,0 @@
|
||||
"""Support for sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AmazonSensorEntityDescription(SensorEntityDescription):
|
||||
"""Amazon Devices sensor entity description."""
|
||||
|
||||
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
|
||||
|
||||
|
||||
SENSORS: Final = (
|
||||
AmazonSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement_fn=lambda device, _key: (
|
||||
UnitOfTemperature.CELSIUS
|
||||
if device.sensors[_key].scale == "CELSIUS"
|
||||
else UnitOfTemperature.FAHRENHEIT
|
||||
),
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="illuminance",
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Amazon Devices sensors based on a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AmazonSensorEntity(coordinator, serial_num, sensor_desc)
|
||||
for sensor_desc in SENSORS
|
||||
for serial_num in coordinator.data
|
||||
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
|
||||
)
|
||||
|
||||
|
||||
class AmazonSensorEntity(AmazonEntity, SensorEntity):
|
||||
"""Sensor device."""
|
||||
|
||||
entity_description: AmazonSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement of the sensor."""
|
||||
if self.entity_description.native_unit_of_measurement_fn:
|
||||
return self.entity_description.native_unit_of_measurement_fn(
|
||||
self.device, self.entity_description.key
|
||||
)
|
||||
|
||||
return super().native_unit_of_measurement
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
return self.device.sensors[self.entity_description.key].value
|
||||
@@ -1,27 +0,0 @@
"""The Altruist integration."""

from __future__ import annotations

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AltruistConfigEntry, AltruistDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AltruistConfigEntry) -> bool:
    """Set up Altruist from a config entry."""

    coordinator = AltruistDataUpdateCoordinator(hass, entry)

    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: AltruistConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,107 +0,0 @@
|
||||
"""Config flow for the Altruist integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from altruistclient import AltruistClient, AltruistDeviceModel, AltruistError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_HOST, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AltruistConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Altruist."""
|
||||
|
||||
device: AltruistDeviceModel
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
ip_address = ""
|
||||
if user_input is not None:
|
||||
ip_address = user_input[CONF_HOST]
|
||||
try:
|
||||
client = await AltruistClient.from_ip_address(
|
||||
async_get_clientsession(self.hass), ip_address
|
||||
)
|
||||
except AltruistError:
|
||||
errors["base"] = "no_device_found"
|
||||
else:
|
||||
self.device = client.device
|
||||
await self.async_set_unique_id(
|
||||
client.device_id, raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=self.device.id,
|
||||
data={
|
||||
CONF_HOST: ip_address,
|
||||
},
|
||||
)
|
||||
|
||||
data_schema = self.add_suggested_values_to_schema(
|
||||
vol.Schema({vol.Required(CONF_HOST): str}),
|
||||
{CONF_HOST: ip_address},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=data_schema,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"ip_address": ip_address,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
_LOGGER.debug("Zeroconf discovery: %s", discovery_info)
|
||||
try:
|
||||
client = await AltruistClient.from_ip_address(
|
||||
async_get_clientsession(self.hass), str(discovery_info.ip_address)
|
||||
)
|
||||
except AltruistError:
|
||||
return self.async_abort(reason="no_device_found")
|
||||
|
||||
self.device = client.device
|
||||
_LOGGER.debug("Zeroconf device: %s", client.device)
|
||||
await self.async_set_unique_id(client.device_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
self.context.update(
|
||||
{
|
||||
"title_placeholders": {
|
||||
"name": self.device.id,
|
||||
}
|
||||
}
|
||||
)
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm discovery."""
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self.device.id,
|
||||
data={
|
||||
CONF_HOST: self.device.ip_address,
|
||||
},
|
||||
)
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
description_placeholders={
|
||||
"model": self.device.id,
|
||||
},
|
||||
)
|
||||
@@ -1,5 +0,0 @@
"""Constants for the Altruist integration."""

DOMAIN = "altruist"

CONF_HOST = "host"
@@ -1,64 +0,0 @@
|
||||
"""Coordinator module for Altruist integration in Home Assistant.
|
||||
|
||||
This module defines the AltruistDataUpdateCoordinator class, which manages
|
||||
data updates for Altruist sensors using the AltruistClient.
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from altruistclient import AltruistClient, AltruistError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import CONF_HOST
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
UPDATE_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
type AltruistConfigEntry = ConfigEntry[AltruistDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AltruistDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]):
|
||||
"""Coordinates data updates for Altruist sensors."""
|
||||
|
||||
client: AltruistClient
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: AltruistConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the data update coordinator for Altruist sensors."""
|
||||
device_id = config_entry.unique_id
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"Altruist {device_id}",
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
)
|
||||
self._ip_address = config_entry.data[CONF_HOST]
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
try:
|
||||
self.client = await AltruistClient.from_ip_address(
|
||||
async_get_clientsession(self.hass), self._ip_address
|
||||
)
|
||||
await self.client.fetch_data()
|
||||
except AltruistError as e:
|
||||
raise ConfigEntryNotReady("Error in Altruist setup") from e
|
||||
|
||||
async def _async_update_data(self) -> dict[str, str]:
|
||||
try:
|
||||
fetched_data = await self.client.fetch_data()
|
||||
except AltruistError as ex:
|
||||
raise UpdateFailed(
|
||||
f"The Altruist {self.client.device_id} is unavailable: {ex}"
|
||||
) from ex
|
||||
return {item["value_type"]: item["value"] for item in fetched_data}
|
||||
@@ -1,15 +0,0 @@
{
  "entity": {
    "sensor": {
      "pm_10": {
        "default": "mdi:thought-bubble"
      },
      "pm_25": {
        "default": "mdi:thought-bubble-outline"
      },
      "radiation": {
        "default": "mdi:radioactive"
      }
    }
  }
}
@@ -1,12 +0,0 @@
{
  "domain": "altruist",
  "name": "Altruist",
  "codeowners": ["@airalab", "@LoSk-p"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/altruist",
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["altruistclient==0.1.1"],
  "zeroconf": ["_altruist._tcp.local."]
}
@@ -1,83 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities of this integration does not explicitly subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide options flow.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration
|
||||
entity-category: todo
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No known use cases for repair issues or flows, yet
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -1,249 +0,0 @@
|
||||
"""Defines the Altruist sensor platform."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
PERCENTAGE,
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
EntityCategory,
|
||||
UnitOfPressure,
|
||||
UnitOfSoundPressure,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import AltruistConfigEntry
|
||||
from .coordinator import AltruistDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AltruistSensorEntityDescription(SensorEntityDescription):
|
||||
"""Class to describe a Sensor entity."""
|
||||
|
||||
native_value_fn: Callable[[str], float] = float
|
||||
state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = [
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
key="BME280_humidity",
|
||||
translation_key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "BME280"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
key="BME280_pressure",
|
||||
translation_key="pressure",
|
||||
native_unit_of_measurement=UnitOfPressure.PA,
|
||||
suggested_unit_of_measurement=UnitOfPressure.MMHG,
|
||||
suggested_display_precision=0,
|
||||
translation_placeholders={"sensor_name": "BME280"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
key="BME280_temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "BME280"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
key="BMP_pressure",
|
||||
translation_key="pressure",
|
||||
native_unit_of_measurement=UnitOfPressure.PA,
|
||||
suggested_unit_of_measurement=UnitOfPressure.MMHG,
|
||||
suggested_display_precision=0,
|
||||
translation_placeholders={"sensor_name": "BMP"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
key="BMP_temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "BMP"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
key="BMP280_temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "BMP280"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
key="BMP280_pressure",
|
||||
translation_key="pressure",
|
||||
native_unit_of_measurement=UnitOfPressure.PA,
|
||||
suggested_unit_of_measurement=UnitOfPressure.MMHG,
|
||||
suggested_display_precision=0,
|
||||
translation_placeholders={"sensor_name": "BMP280"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
key="HTU21D_humidity",
|
||||
translation_key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "HTU21D"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
key="HTU21D_temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "HTU21D"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
translation_key="pm_10",
|
||||
key="SDS_P1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
translation_key="pm_25",
|
||||
key="SDS_P2",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
key="SHT3X_humidity",
|
||||
translation_key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "SHT3X"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
key="SHT3X_temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "SHT3X"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
key="signal",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.SOUND_PRESSURE,
|
||||
key="PCBA_noiseMax",
|
||||
translation_key="noise_max",
|
||||
native_unit_of_measurement=UnitOfSoundPressure.DECIBEL,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.SOUND_PRESSURE,
|
||||
key="PCBA_noiseAvg",
|
||||
translation_key="noise_avg",
|
||||
native_unit_of_measurement=UnitOfSoundPressure.DECIBEL,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
translation_key="co2",
|
||||
key="CCS_CO2",
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "CCS"},
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
|
||||
key="CCS_TVOC",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
key="GC",
|
||||
native_unit_of_measurement="μR/h",
|
||||
translation_key="radiation",
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
AltruistSensorEntityDescription(
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
translation_key="co2",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
key="SCD4x_co2",
|
||||
suggested_display_precision=2,
|
||||
translation_placeholders={"sensor_name": "SCD4x"},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AltruistConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add sensors for passed config_entry in HA."""
|
||||
coordinator = config_entry.runtime_data
|
||||
async_add_entities(
|
||||
AltruistSensor(coordinator, sensor_description)
|
||||
for sensor_description in SENSOR_DESCRIPTIONS
|
||||
if sensor_description.key in coordinator.data
|
||||
)
|
||||
|
||||
|
||||
class AltruistSensor(CoordinatorEntity[AltruistDataUpdateCoordinator], SensorEntity):
|
||||
"""Implementation of a Altruist sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AltruistDataUpdateCoordinator,
|
||||
description: AltruistSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the Altruist sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._device = coordinator.client.device
|
||||
self.entity_description: AltruistSensorEntityDescription = description
|
||||
self._attr_unique_id = f"{self._device.id}-{description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, self._device.id)},
|
||||
manufacturer="Robonomics",
|
||||
model="Altruist",
|
||||
sw_version=self._device.fw_version,
|
||||
configuration_url=f"http://{self._device.ip_address}",
|
||||
serial_number=self._device.id,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return (
|
||||
super().available and self.entity_description.key in self.coordinator.data
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | int:
|
||||
"""Return the native value of the sensor."""
|
||||
string_value = self.coordinator.data[self.entity_description.key]
|
||||
return self.entity_description.native_value_fn(string_value)
|
||||
@@ -1,51 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"step": {
|
||||
"discovery_confirm": {
|
||||
"description": "Do you want to start setup {model}?"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Altruist IP address or hostname in the local network"
|
||||
},
|
||||
"description": "Fill in Altruist IP address or hostname in your local network"
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"no_device_found": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"humidity": {
|
||||
"name": "{sensor_name} humidity"
|
||||
},
|
||||
"pressure": {
|
||||
"name": "{sensor_name} pressure"
|
||||
},
|
||||
"temperature": {
|
||||
"name": "{sensor_name} temperature"
|
||||
},
|
||||
"noise_max": {
|
||||
"name": "Maximum noise"
|
||||
},
|
||||
"noise_avg": {
|
||||
"name": "Average noise"
|
||||
},
|
||||
"co2": {
|
||||
"name": "{sensor_name} CO2"
|
||||
},
|
||||
"radiation": {
|
||||
"name": "Radiation level"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,6 @@

from __future__ import annotations

from homeassistant.components.camera import CameraEntityFeature
from homeassistant.components.mjpeg import MjpegCamera, filter_urllib3_logging
from homeassistant.const import (
    CONF_HOST,
@@ -32,7 +31,6 @@ class IPWebcamCamera(MjpegCamera):
    """Representation of a IP Webcam camera."""

    _attr_has_entity_name = True
    _attr_supported_features = CameraEntityFeature.STREAM

    def __init__(self, coordinator: AndroidIPCamDataUpdateCoordinator) -> None:
        """Initialize the camera."""
@@ -48,17 +46,3 @@ class IPWebcamCamera(MjpegCamera):
            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
            name=coordinator.config_entry.data[CONF_HOST],
        )
        self._coordinator = coordinator

    async def stream_source(self) -> str:
        """Get the stream source for the Android IP camera."""
        return self._coordinator.cam.get_rtsp_url(
            video_codec="h264",  # most compatible & recommended
            # while "opus" is compatible with more devices,
            # HA's stream integration requires AAC or MP3,
            # and IP webcam doesn't provide MP3 audio.
            # aac is supported on select devices >= android 4.1.
            # The stream will be quiet on devices that don't support aac,
            # but it won't fail.
            audio_codec="aac",
        )

@@ -6,16 +6,11 @@ from functools import partial
|
||||
|
||||
import anthropic
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
|
||||
|
||||
@@ -25,24 +20,13 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Anthropic."""
|
||||
await async_migrate_integration(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Set up Anthropic from a config entry."""
|
||||
client = await hass.async_add_executor_job(
|
||||
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
|
||||
)
|
||||
try:
|
||||
# Use model from first conversation subentry for validation
|
||||
subentries = list(entry.subentries.values())
|
||||
if subentries:
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
else:
|
||||
model_id = RECOMMENDED_CHAT_MODEL
|
||||
model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
|
||||
LOGGER.debug("Anthropic model: %s", model.display_name)
|
||||
except anthropic.AuthenticationError as err:
|
||||
@@ -61,68 +45,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload Anthropic."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
"""Migrate integration entry structure."""
|
||||
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
if not any(entry.version == 1 for entry in entries):
|
||||
return
|
||||
|
||||
api_keys_entries: dict[str, ConfigEntry] = {}
|
||||
entity_registry = er.async_get(hass)
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
for entry in entries:
|
||||
use_existing = False
|
||||
subentry = ConfigSubentry(
|
||||
data=entry.options,
|
||||
subentry_type="conversation",
|
||||
title=entry.title,
|
||||
unique_id=None,
|
||||
)
|
||||
if entry.data[CONF_API_KEY] not in api_keys_entries:
|
||||
use_existing = True
|
||||
api_keys_entries[entry.data[CONF_API_KEY]] = entry
|
||||
|
||||
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
|
||||
|
||||
hass.config_entries.async_add_subentry(parent_entry, subentry)
|
||||
conversation_entity = entity_registry.async_get_entity_id(
|
||||
"conversation",
|
||||
DOMAIN,
|
||||
entry.entry_id,
|
||||
)
|
||||
if conversation_entity is not None:
|
||||
entity_registry.async_update_entity(
|
||||
conversation_entity,
|
||||
config_entry_id=parent_entry.entry_id,
|
||||
config_subentry_id=subentry.subentry_id,
|
||||
new_unique_id=subentry.subentry_id,
|
||||
)
|
||||
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, entry.entry_id)}
|
||||
)
|
||||
if device is not None:
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
new_identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
add_config_subentry_id=subentry.subentry_id,
|
||||
add_config_entry_id=parent_entry.entry_id,
|
||||
)
|
||||
if parent_entry.entry_id != entry.entry_id:
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
)
|
||||
|
||||
if not use_existing:
|
||||
await hass.config_entries.async_remove(entry.entry_id)
|
||||
else:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options={},
|
||||
version=2,
|
||||
)
|
||||
|
||||
@@ -5,21 +5,20 @@ from __future__ import annotations
|
||||
from collections.abc import Mapping
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.selector import (
|
||||
NumberSelector,
|
||||
@@ -37,7 +36,6 @@ from .const import (
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
@@ -74,7 +72,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anthropic."""
|
||||
|
||||
VERSION = 2
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -83,7 +81,6 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(user_input)
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except anthropic.APITimeoutError:
|
||||
@@ -105,93 +102,57 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(
|
||||
title="Claude",
|
||||
data=user_input,
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
}
|
||||
],
|
||||
options=RECOMMENDED_OPTIONS,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"conversation": ConversationSubentryFlowHandler}
|
||||
@staticmethod
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Create the options flow."""
|
||||
return AnthropicOptionsFlow(config_entry)
|
||||
|
||||
|
||||
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Flow for managing conversation subentries."""
|
||||
class AnthropicOptionsFlow(OptionsFlow):
|
||||
"""Anthropic config flow options handler."""
|
||||
|
||||
last_rendered_recommended = False
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.last_rendered_recommended = config_entry.options.get(
|
||||
CONF_RECOMMENDED, False
|
||||
)
|
||||
|
||||
@property
|
||||
def _is_new(self) -> bool:
|
||||
"""Return if this is a new subentry."""
|
||||
return self.source == "user"
|
||||
|
||||
async def async_step_set_options(
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Set conversation options."""
|
||||
# abort if entry is not loaded
|
||||
if self._get_entry().state != ConfigEntryState.LOADED:
|
||||
return self.async_abort(reason="entry_not_loaded")
|
||||
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is None:
|
||||
if self._is_new:
|
||||
options = RECOMMENDED_OPTIONS.copy()
|
||||
if user_input is not None:
|
||||
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if not user_input.get(CONF_LLM_HASS_API):
|
||||
user_input.pop(CONF_LLM_HASS_API, None)
|
||||
if user_input.get(
|
||||
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
|
||||
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
|
||||
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
else:
|
||||
# If this is a reconfiguration, we need to copy the existing options
|
||||
# so that we can show the current values in the form.
|
||||
options = self._get_reconfigure_subentry().data.copy()
|
||||
# Re-render the options again, now with the recommended options shown/hidden
|
||||
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
|
||||
|
||||
self.last_rendered_recommended = cast(
|
||||
bool, options.get(CONF_RECOMMENDED, False)
|
||||
)
|
||||
|
||||
elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if not user_input.get(CONF_LLM_HASS_API):
|
||||
user_input.pop(CONF_LLM_HASS_API, None)
|
||||
if user_input.get(
|
||||
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
|
||||
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
|
||||
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
|
||||
|
||||
if not errors:
|
||||
if self._is_new:
|
||||
return self.async_create_entry(
|
||||
title=user_input.pop(CONF_NAME),
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_update_and_abort(
|
||||
self._get_entry(),
|
||||
self._get_reconfigure_subentry(),
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
options = user_input
|
||||
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
|
||||
else:
|
||||
# Re-render the options again, now with the recommended options shown/hidden
|
||||
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
|
||||
|
||||
options = {
|
||||
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
|
||||
CONF_PROMPT: user_input[CONF_PROMPT],
|
||||
CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
|
||||
}
|
||||
options = {
|
||||
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
|
||||
CONF_PROMPT: user_input[CONF_PROMPT],
|
||||
CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
|
||||
}
|
||||
|
||||
suggested_values = options.copy()
|
||||
if not suggested_values.get(CONF_PROMPT):
|
||||
@@ -202,25 +163,19 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
|
||||
|
||||
schema = self.add_suggested_values_to_schema(
|
||||
vol.Schema(
|
||||
anthropic_config_option_schema(self.hass, self._is_new, options)
|
||||
),
|
||||
vol.Schema(anthropic_config_option_schema(self.hass, options)),
|
||||
suggested_values,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="set_options",
|
||||
step_id="init",
|
||||
data_schema=schema,
|
||||
errors=errors or None,
|
||||
)
|
||||
|
||||
async_step_user = async_step_set_options
|
||||
async_step_reconfigure = async_step_set_options
|
||||
|
||||
|
||||
def anthropic_config_option_schema(
|
||||
hass: HomeAssistant,
|
||||
is_new: bool,
|
||||
options: Mapping[str, Any],
|
||||
) -> dict:
|
||||
"""Return a schema for Anthropic completion options."""
|
||||
@@ -232,24 +187,15 @@ def anthropic_config_option_schema(
|
||||
for api in llm.async_get_apis(hass)
|
||||
]
|
||||
|
||||
if is_new:
|
||||
schema: dict[vol.Required | vol.Optional, Any] = {
|
||||
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
|
||||
}
|
||||
else:
|
||||
schema = {}
|
||||
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
schema = {
|
||||
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
}
|
||||
|
||||
if options.get(CONF_RECOMMENDED):
|
||||
return schema
|
||||
|
||||
@@ -5,8 +5,6 @@ import logging
DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)

DEFAULT_CONVERSATION_NAME = "Claude conversation"

CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"

@@ -38,7 +38,7 @@ from anthropic.types import (
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -72,14 +72,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up conversation entities."""
|
||||
for subentry in config_entry.subentries.values():
|
||||
if subentry.subentry_type != "conversation":
|
||||
continue
|
||||
|
||||
async_add_entities(
|
||||
[AnthropicConversationEntity(config_entry, subentry)],
|
||||
config_subentry_id=subentry.subentry_id,
|
||||
)
|
||||
agent = AnthropicConversationEntity(config_entry)
|
||||
async_add_entities([agent])
|
||||
|
||||
|
||||
def _format_tool(
|
||||
@@ -332,22 +326,21 @@ class AnthropicConversationEntity(
|
||||
):
|
||||
"""Anthropic conversation agent."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
def __init__(self, entry: AnthropicConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
if self.subentry.data.get(CONF_LLM_HASS_API):
|
||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
)
|
||||
@@ -370,7 +363,7 @@ class AnthropicConversationEntity(
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> conversation.ConversationResult:
|
||||
"""Call the API."""
|
||||
options = self.subentry.data
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_provide_llm_data(
|
||||
@@ -400,7 +393,7 @@ class AnthropicConversationEntity(
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
|
||||
@@ -12,44 +12,28 @@
|
||||
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
|
||||
"authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
}
|
||||
},
|
||||
"config_subentries": {
|
||||
"conversation": {
|
||||
"initiate_flow": {
|
||||
"user": "Add conversation agent",
|
||||
"reconfigure": "Reconfigure conversation agent"
|
||||
},
|
||||
"entry_type": "Conversation agent",
|
||||
|
||||
"step": {
|
||||
"set_options": {
|
||||
"data": {
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"prompt": "Instructions",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"temperature": "Temperature",
|
||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||
"recommended": "Recommended model settings",
|
||||
"thinking_budget_tokens": "Thinking budget"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
|
||||
}
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"prompt": "Instructions",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"temperature": "Temperature",
|
||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||
"recommended": "Recommended model settings",
|
||||
"thinking_budget_tokens": "Thinking budget"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"entry_not_loaded": "Cannot add things while the configuration is disabled."
|
||||
},
|
||||
"error": {
|
||||
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,6 @@
"documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "platinum",
"quality_scale": "bronze",
"requirements": ["bosch-alarm-mode2==0.4.6"]
}

@@ -28,41 +28,38 @@ rules:
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
No options flow is provided.
docs-installation-parameters: done
entity-unavailable: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
test-coverage: done

# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
Device type integration
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: done
exception-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |

@@ -498,6 +498,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""Flag supported features."""
return self._attr_supported_features

@property
def supported_features_compat(self) -> CameraEntityFeature:
"""Return the supported features as CameraEntityFeature.

Remove this compatibility shim in 2025.1 or later.
"""
features = self.supported_features
if type(features) is int:
new_features = CameraEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features

@cached_property
def is_recording(self) -> bool:
"""Return true if the device is recording."""
@@ -691,7 +704,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
async def async_internal_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_internal_added_to_hass()
self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
self.__supports_stream = (
self.supported_features_compat & CameraEntityFeature.STREAM
)
await self.async_refresh_providers(write_state=False)

async def async_refresh_providers(self, *, write_state: bool = True) -> None:
@@ -720,7 +735,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
) -> _T | None:
"""Get first provider that supports this camera."""
if CameraEntityFeature.STREAM not in self.supported_features:
if CameraEntityFeature.STREAM not in self.supported_features_compat:
return None

return await fn(self.hass, self)
@@ -770,7 +785,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def camera_capabilities(self) -> CameraCapabilities:
"""Return the camera capabilities."""
frontend_stream_types = set()
if CameraEntityFeature.STREAM in self.supported_features:
if CameraEntityFeature.STREAM in self.supported_features_compat:
if self._supports_native_async_webrtc:
# The camera has a native WebRTC implementation
frontend_stream_types.add(StreamType.WEB_RTC)
@@ -790,7 +805,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""
super().async_write_ha_state()
if self.__supports_stream != (
supports_stream := self.supported_features & CameraEntityFeature.STREAM
supports_stream := self.supported_features_compat
& CameraEntityFeature.STREAM
):
self.__supports_stream = supports_stream
self._invalidate_camera_capabilities_cache()

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.6.23"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.6.10"]
}

@@ -26,7 +26,6 @@ from homeassistant.helpers.schema_config_entry_flow import (
)

from .const import (
CONF_MAX_SUB_INTERVAL,
CONF_ROUND_DIGITS,
CONF_TIME_WINDOW,
CONF_UNIT_PREFIX,
@@ -105,9 +104,6 @@ async def _get_options_dict(handler: SchemaCommonFlowHandler | None) -> dict:
options=TIME_UNITS, translation_key="time_unit"
),
),
vol.Optional(CONF_MAX_SUB_INTERVAL): selector.DurationSelector(
selector.DurationSelectorConfig(allow_negative=False)
),
}

@@ -7,4 +7,3 @@ CONF_TIME_WINDOW = "time_window"
CONF_UNIT = "unit"
CONF_UNIT_PREFIX = "unit_prefix"
CONF_UNIT_TIME = "unit_time"
CONF_MAX_SUB_INTERVAL = "max_sub_interval"

@@ -2,7 +2,7 @@
"domain": "derivative",
"name": "Derivative",
"after_dependencies": ["counter"],
"codeowners": ["@afaucogney", "@karwosts"],
"codeowners": ["@afaucogney"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/derivative",
"integration_type": "helper",

@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal, DecimalException, InvalidOperation
|
||||
from decimal import Decimal, DecimalException
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -25,7 +25,6 @@ from homeassistant.const import (
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
Event,
|
||||
EventStateChangedData,
|
||||
EventStateReportedData,
|
||||
@@ -41,14 +40,12 @@ from homeassistant.helpers.entity_platform import (
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.event import (
|
||||
async_call_later,
|
||||
async_track_state_change_event,
|
||||
async_track_state_report_event,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import (
|
||||
CONF_MAX_SUB_INTERVAL,
|
||||
CONF_ROUND_DIGITS,
|
||||
CONF_TIME_WINDOW,
|
||||
CONF_UNIT,
|
||||
@@ -92,20 +89,10 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
vol.Optional(CONF_UNIT_TIME, default=UnitOfTime.HOURS): vol.In(UNIT_TIME),
|
||||
vol.Optional(CONF_UNIT): cv.string,
|
||||
vol.Optional(CONF_TIME_WINDOW, default=DEFAULT_TIME_WINDOW): cv.time_period,
|
||||
vol.Optional(CONF_MAX_SUB_INTERVAL): cv.positive_time_period,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _is_decimal_state(state: str) -> bool:
|
||||
try:
|
||||
Decimal(state)
|
||||
except (InvalidOperation, TypeError):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -127,11 +114,6 @@ async def async_setup_entry(
|
||||
# Before we had support for optional selectors, "none" was used for selecting nothing
|
||||
unit_prefix = None
|
||||
|
||||
if max_sub_interval_dict := config_entry.options.get(CONF_MAX_SUB_INTERVAL, None):
|
||||
max_sub_interval = cv.time_period(max_sub_interval_dict)
|
||||
else:
|
||||
max_sub_interval = None
|
||||
|
||||
derivative_sensor = DerivativeSensor(
|
||||
name=config_entry.title,
|
||||
round_digits=int(config_entry.options[CONF_ROUND_DIGITS]),
|
||||
@@ -142,7 +124,6 @@ async def async_setup_entry(
|
||||
unit_prefix=unit_prefix,
|
||||
unit_time=config_entry.options[CONF_UNIT_TIME],
|
||||
device_info=device_info,
|
||||
max_sub_interval=max_sub_interval,
|
||||
)
|
||||
|
||||
async_add_entities([derivative_sensor])
|
||||
@@ -164,7 +145,6 @@ async def async_setup_platform(
|
||||
unit_prefix=config[CONF_UNIT_PREFIX],
|
||||
unit_time=config[CONF_UNIT_TIME],
|
||||
unique_id=None,
|
||||
max_sub_interval=config.get(CONF_MAX_SUB_INTERVAL),
|
||||
)
|
||||
|
||||
async_add_entities([derivative])
|
||||
@@ -186,7 +166,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
unit_of_measurement: str | None,
|
||||
unit_prefix: str | None,
|
||||
unit_time: UnitOfTime,
|
||||
max_sub_interval: timedelta | None,
|
||||
unique_id: str | None,
|
||||
device_info: DeviceInfo | None = None,
|
||||
) -> None:
|
||||
@@ -213,34 +192,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
self._unit_prefix = UNIT_PREFIXES[unit_prefix]
|
||||
self._unit_time = UNIT_TIME[unit_time]
|
||||
self._time_window = time_window.total_seconds()
|
||||
self._max_sub_interval: timedelta | None = (
|
||||
None # disable time based derivative
|
||||
if max_sub_interval is None or max_sub_interval.total_seconds() == 0
|
||||
else max_sub_interval
|
||||
)
|
||||
self._cancel_max_sub_interval_exceeded_callback: CALLBACK_TYPE = (
|
||||
lambda *args: None
|
||||
)
|
||||
|
||||
def _calc_derivative_from_state_list(self, current_time: datetime) -> Decimal:
|
||||
def calculate_weight(start: datetime, end: datetime, now: datetime) -> float:
|
||||
window_start = now - timedelta(seconds=self._time_window)
|
||||
return (end - max(start, window_start)).total_seconds() / self._time_window
|
||||
|
||||
derivative = Decimal("0.00")
|
||||
for start, end, value in self._state_list:
|
||||
weight = calculate_weight(start, end, current_time)
|
||||
derivative = derivative + (value * Decimal(weight))
|
||||
|
||||
return derivative
|
||||
|
||||
def _prune_state_list(self, current_time: datetime) -> None:
|
||||
# filter out all derivatives older than `time_window` from our window list
|
||||
self._state_list = [
|
||||
(time_start, time_end, state)
|
||||
for time_start, time_end, state in self._state_list
|
||||
if (current_time - time_end).total_seconds() < self._time_window
|
||||
]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
@@ -258,52 +209,13 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
except SyntaxError as err:
|
||||
_LOGGER.warning("Could not restore last state: %s", err)
|
||||
|
||||
def schedule_max_sub_interval_exceeded(source_state: State | None) -> None:
|
||||
"""Schedule calculation using the source state and max_sub_interval.
|
||||
|
||||
The callback reference is stored for possible cancellation if the source state
|
||||
reports a change before max_sub_interval has passed.
|
||||
If the callback is executed, meaning there was no state change reported, the
|
||||
source_state is assumed constant and calculation is done using its value.
|
||||
"""
|
||||
if (
|
||||
self._max_sub_interval is not None
|
||||
and source_state is not None
|
||||
and (_is_decimal_state(source_state.state))
|
||||
):
|
||||
|
||||
@callback
|
||||
def _calc_derivative_on_max_sub_interval_exceeded_callback(
|
||||
now: datetime,
|
||||
) -> None:
|
||||
"""Calculate derivative based on time and reschedule."""
|
||||
|
||||
self._prune_state_list(now)
|
||||
derivative = self._calc_derivative_from_state_list(now)
|
||||
self._attr_native_value = round(derivative, self._round_digits)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
# If derivative is now zero, don't schedule another timeout callback, as it will have no effect
|
||||
if derivative != 0:
|
||||
schedule_max_sub_interval_exceeded(source_state)
|
||||
|
||||
self._cancel_max_sub_interval_exceeded_callback = async_call_later(
|
||||
self.hass,
|
||||
self._max_sub_interval,
|
||||
_calc_derivative_on_max_sub_interval_exceeded_callback,
|
||||
)
|
||||
|
||||
@callback
|
||||
def on_state_reported(event: Event[EventStateReportedData]) -> None:
|
||||
"""Handle constant sensor state."""
|
||||
self._cancel_max_sub_interval_exceeded_callback()
|
||||
new_state = event.data["new_state"]
|
||||
if self._attr_native_value == Decimal(0):
|
||||
# If the derivative is zero, and the source sensor hasn't
|
||||
# changed state, then we know it will still be zero.
|
||||
return
|
||||
schedule_max_sub_interval_exceeded(new_state)
|
||||
new_state = event.data["new_state"]
|
||||
if new_state is not None:
|
||||
calc_derivative(
|
||||
@@ -313,9 +225,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
@callback
|
||||
def on_state_changed(event: Event[EventStateChangedData]) -> None:
|
||||
"""Handle changed sensor state."""
|
||||
self._cancel_max_sub_interval_exceeded_callback()
|
||||
new_state = event.data["new_state"]
|
||||
schedule_max_sub_interval_exceeded(new_state)
|
||||
old_state = event.data["old_state"]
|
||||
if new_state is not None and old_state is not None:
|
||||
calc_derivative(new_state, old_state.state, old_state.last_reported)
|
||||
@@ -402,16 +312,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
self._attr_native_value = round(derivative, self._round_digits)
|
||||
self.async_write_ha_state()
|
||||
|
||||
if self._max_sub_interval is not None:
|
||||
source_state = self.hass.states.get(self._sensor_source_id)
|
||||
schedule_max_sub_interval_exceeded(source_state)
|
||||
|
||||
@callback
|
||||
def on_removed() -> None:
|
||||
self._cancel_max_sub_interval_exceeded_callback()
|
||||
|
||||
self.async_on_remove(on_removed)
|
||||
|
||||
self.async_on_remove(
|
||||
async_track_state_change_event(
|
||||
self.hass, self._sensor_source_id, on_state_changed
|
||||
|
||||
@@ -6,7 +6,6 @@
"title": "Create Derivative sensor",
"description": "Create a sensor that estimates the derivative of a sensor.",
"data": {
"max_sub_interval": "Max sub-interval",
"name": "[%key:common::config_flow::data::name%]",
"round": "Precision",
"source": "Input sensor",
@@ -15,7 +14,6 @@
"unit_time": "Time unit"
},
"data_description": {
"max_sub_interval": "If defined, derivative automatically recalculates if the source has not updated for this duration.",
"round": "Controls the number of decimal digits in the output.",
"time_window": "If set, the sensor's value is a time-weighted moving average of derivatives within this window.",
"unit_prefix": "The output will be scaled according to the selected metric prefix and time unit of the derivative."
@@ -27,7 +25,6 @@
"step": {
"init": {
"data": {
"max_sub_interval": "[%key:component::derivative::config::step::user::data::max_sub_interval%]",
"name": "[%key:common::config_flow::data::name%]",
"round": "[%key:component::derivative::config::step::user::data::round%]",
"source": "[%key:component::derivative::config::step::user::data::source%]",
@@ -36,7 +33,6 @@
"unit_time": "[%key:component::derivative::config::step::user::data::unit_time%]"
},
"data_description": {
"max_sub_interval": "[%key:component::derivative::config::step::user::data_description::max_sub_interval%]",
"round": "[%key:component::derivative::config::step::user::data_description::round%]",
"time_window": "[%key:component::derivative::config::step::user::data_description::time_window%]",
"unit_prefix": "[%key:component::derivative::config::step::user::data_description::unit_prefix%]"

@@ -9,10 +9,7 @@ import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
ConditionCheckerType,
trace_condition_function,
)
from homeassistant.helpers.condition import ConditionProtocol, trace_condition_function
from homeassistant.helpers.typing import ConfigType

from . import DeviceAutomationType, async_get_device_automation_platform
@@ -22,24 +19,13 @@ if TYPE_CHECKING:
from homeassistant.helpers import condition


class DeviceAutomationConditionProtocol(Protocol):
class DeviceAutomationConditionProtocol(ConditionProtocol, Protocol):
"""Define the format of device_condition modules.

Each module must define either CONDITION_SCHEMA or async_validate_condition_config.
Each module must define either CONDITION_SCHEMA or async_validate_condition_config
from ConditionProtocol.
"""

CONDITION_SCHEMA: vol.Schema

async def async_validate_condition_config(
self, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""

def async_condition_from_config(
self, hass: HomeAssistant, config: ConfigType
) -> ConditionCheckerType:
"""Evaluate state based on configuration."""

async def async_get_condition_capabilities(
self, hass: HomeAssistant, config: ConfigType
) -> dict[str, vol.Schema]:

@@ -18,7 +18,7 @@ from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .const import DOMAIN, GATEWAY_SERIAL_PATTERN, PLATFORMS
|
||||
|
||||
type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]
|
||||
|
||||
@@ -29,9 +29,25 @@ async def async_setup_entry(
|
||||
"""Set up the devolo account from a config entry."""
|
||||
mydevolo = configure_mydevolo(entry.data)
|
||||
|
||||
gateway_ids = await hass.async_add_executor_job(
|
||||
check_mydevolo_and_get_gateway_ids, mydevolo
|
||||
)
|
||||
credentials_valid = await hass.async_add_executor_job(mydevolo.credentials_valid)
|
||||
|
||||
if not credentials_valid:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth",
|
||||
)
|
||||
|
||||
if await hass.async_add_executor_job(mydevolo.maintenance):
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="maintenance",
|
||||
)
|
||||
|
||||
gateway_ids = await hass.async_add_executor_job(mydevolo.get_gateway_ids)
|
||||
|
||||
if entry.unique_id and GATEWAY_SERIAL_PATTERN.match(entry.unique_id):
|
||||
uuid = await hass.async_add_executor_job(mydevolo.uuid)
|
||||
hass.config_entries.async_update_entry(entry, unique_id=uuid)
|
||||
|
||||
def shutdown(event: Event) -> None:
|
||||
for gateway in entry.runtime_data:
|
||||
@@ -99,19 +115,3 @@ def configure_mydevolo(conf: Mapping[str, Any]) -> Mydevolo:
|
||||
mydevolo.user = conf[CONF_USERNAME]
|
||||
mydevolo.password = conf[CONF_PASSWORD]
|
||||
return mydevolo
|
||||
|
||||
|
||||
def check_mydevolo_and_get_gateway_ids(mydevolo: Mydevolo) -> list[str]:
|
||||
"""Check if the credentials are valid and return user's gateway IDs as long as mydevolo is not in maintenance mode."""
|
||||
if not mydevolo.credentials_valid():
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth",
|
||||
)
|
||||
if mydevolo.maintenance():
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="maintenance",
|
||||
)
|
||||
|
||||
return mydevolo.get_gateway_ids()
|
||||
|
||||
@@ -1,5 +1,7 @@
"""Constants for the devolo_home_control integration."""

import re

from homeassistant.const import Platform

DOMAIN = "devolo_home_control"
@@ -12,4 +14,5 @@ PLATFORMS = [
Platform.SIREN,
Platform.SWITCH,
]
GATEWAY_SERIAL_PATTERN = re.compile(r"\d{16}")
SUPPORTED_MODEL_TYPES = ["2600", "2601"]

@@ -65,7 +65,6 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
"/ivp/ensemble/generator",
"/ivp/meters",
"/ivp/meters/readings",
"/ivp/pdm/device_data",
"/home",
]

@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.1.0"],
"requirements": ["pyenphase==2.0.1"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

@@ -45,7 +45,6 @@ from homeassistant.const import (
|
||||
UnitOfFrequency,
|
||||
UnitOfPower,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -81,114 +80,6 @@ INVERTER_SENSORS = (
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
value_fn=attrgetter("last_report_watts"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="dc_voltage",
|
||||
translation_key="dc_voltage",
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("dc_voltage"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="dc_current",
|
||||
translation_key="dc_current",
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("dc_current"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="ac_voltage",
|
||||
translation_key="ac_voltage",
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("ac_voltage"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="ac_current",
|
||||
translation_key="ac_current",
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("ac_current"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="ac_frequency",
|
||||
native_unit_of_measurement=UnitOfFrequency.HERTZ,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.FREQUENCY,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("ac_frequency"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=attrgetter("temperature"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="lifetime_energy",
|
||||
translation_key="lifetime_energy",
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("lifetime_energy"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="energy_today",
|
||||
translation_key="energy_today",
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("energy_today"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="last_report_duration",
|
||||
translation_key="last_report_duration",
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=attrgetter("last_report_duration"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="energy_produced",
|
||||
translation_key="energy_produced",
|
||||
native_unit_of_measurement=UnitOfEnergy.MILLIWATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
suggested_display_precision=3,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("energy_produced"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key="max_reported",
|
||||
translation_key="max_reported",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=attrgetter("max_report_watts"),
|
||||
),
|
||||
EnvoyInverterSensorEntityDescription(
|
||||
key=LAST_REPORTED_KEY,
|
||||
translation_key=LAST_REPORTED_KEY,
|
||||
|
||||
@@ -379,34 +379,7 @@
"name": "Aggregated Battery capacity"
},
"aggregated_soc": {
"name": "Aggregated battery SOC"
},
"dc_voltage": {
"name": "DC voltage"
},
"dc_current": {
"name": "DC current"
},
"ac_voltage": {
"name": "AC voltage"
},
"ac_current": {
"name": "AC current"
},
"lifetime_energy": {
"name": "[%key:component::enphase_envoy::entity::sensor::lifetime_production::name%]"
},
"energy_today": {
"name": "[%key:component::enphase_envoy::entity::sensor::daily_production::name%]"
},
"energy_produced": {
"name": "Energy production since previous report"
},
"max_reported": {
"name": "Lifetime maximum power"
},
"last_report_duration": {
"name": "Last report duration"
"name": "Aggregated battery soc"
}
},
"switch": {

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==33.1.1",
"aioesphomeapi==32.2.4",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==2.16.0"
],

@@ -1,6 +1,6 @@
"""The foscam component."""

from libpyfoscamcgi import FoscamCamera
from libpyfoscam import FoscamCamera

from homeassistant.const import (
CONF_HOST,

@@ -2,8 +2,8 @@

from typing import Any

from libpyfoscamcgi import FoscamCamera
from libpyfoscamcgi.foscamcgi import (
from libpyfoscam import FoscamCamera
from libpyfoscam.foscam import (
ERROR_FOSCAM_AUTH,
ERROR_FOSCAM_UNAVAILABLE,
FOSCAM_SUCCESS,

@@ -4,7 +4,7 @@ import asyncio
from datetime import timedelta
from typing import Any

from libpyfoscamcgi import FoscamCamera
from libpyfoscam import FoscamCamera

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/foscam",
"iot_class": "local_polling",
"loggers": ["libpyfoscamcgi"],
"requirements": ["libpyfoscamcgi==0.0.6"]
"loggers": ["libpyfoscam"],
"requirements": ["libpyfoscam==1.2.2"]
}

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250531.4"]
"requirements": ["home-assistant-frontend==20250531.3"]
}

@@ -12,7 +12,7 @@ from google.genai.types import File, FileState
|
||||
from requests.exceptions import Timeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
@@ -26,11 +26,7 @@ from homeassistant.exceptions import (
|
||||
ConfigEntryNotReady,
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -60,8 +56,6 @@ type GoogleGenerativeAIConfigEntry = ConfigEntry[Client]
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Google Generative AI Conversation."""
|
||||
|
||||
await async_migrate_integration(hass)
|
||||
|
||||
async def generate_content(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
|
||||
@@ -215,68 +209,3 @@ async def async_unload_entry(
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
"""Migrate integration entry structure."""
|
||||
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
if not any(entry.version == 1 for entry in entries):
|
||||
return
|
||||
|
||||
api_keys_entries: dict[str, ConfigEntry] = {}
|
||||
entity_registry = er.async_get(hass)
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
for entry in entries:
|
||||
use_existing = False
|
||||
subentry = ConfigSubentry(
|
||||
data=entry.options,
|
||||
subentry_type="conversation",
|
||||
title=entry.title,
|
||||
unique_id=None,
|
||||
)
|
||||
if entry.data[CONF_API_KEY] not in api_keys_entries:
|
||||
use_existing = True
|
||||
api_keys_entries[entry.data[CONF_API_KEY]] = entry
|
||||
|
||||
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
|
||||
|
||||
hass.config_entries.async_add_subentry(parent_entry, subentry)
|
||||
conversation_entity = entity_registry.async_get_entity_id(
|
||||
"conversation",
|
||||
DOMAIN,
|
||||
entry.entry_id,
|
||||
)
|
||||
if conversation_entity is not None:
|
||||
entity_registry.async_update_entity(
|
||||
conversation_entity,
|
||||
config_entry_id=parent_entry.entry_id,
|
||||
config_subentry_id=subentry.subentry_id,
|
||||
new_unique_id=subentry.subentry_id,
|
||||
)
|
||||
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, entry.entry_id)}
|
||||
)
|
||||
if device is not None:
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
new_identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
add_config_subentry_id=subentry.subentry_id,
|
||||
add_config_entry_id=parent_entry.entry_id,
|
||||
)
|
||||
if parent_entry.entry_id != entry.entry_id:
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
)
|
||||
|
||||
if not use_existing:
|
||||
await hass.config_entries.async_remove(entry.entry_id)
|
||||
else:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options={},
|
||||
version=2,
|
||||
)
|
||||
|
||||
@@ -4,7 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
from google import genai
|
||||
from google.genai.errors import APIError, ClientError
|
||||
@@ -14,14 +15,12 @@ import voluptuous as vol
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_REAUTH,
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.selector import (
|
||||
NumberSelector,
|
||||
@@ -46,7 +45,6 @@ from .const import (
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
@@ -68,7 +66,7 @@ STEP_API_DATA_SCHEMA = vol.Schema(
|
||||
|
||||
RECOMMENDED_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
@@ -92,7 +90,7 @@ async def validate_input(data: dict[str, Any]) -> None:
|
||||
class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Google Generative AI Conversation."""
|
||||
|
||||
VERSION = 2
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_api(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -100,7 +98,6 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(user_input)
|
||||
try:
|
||||
await validate_input(user_input)
|
||||
except (APIError, Timeout) as err:
|
||||
@@ -120,14 +117,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(
|
||||
title="Google Generative AI",
|
||||
data=user_input,
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
}
|
||||
],
|
||||
options=RECOMMENDED_OPTIONS,
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="api",
|
||||
@@ -166,72 +156,41 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"conversation": ConversationSubentryFlowHandler}
|
||||
@staticmethod
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Create the options flow."""
|
||||
return GoogleGenerativeAIOptionsFlow(config_entry)
|
||||
|
||||
|
||||
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Flow for managing conversation subentries."""
|
||||
class GoogleGenerativeAIOptionsFlow(OptionsFlow):
|
||||
"""Google Generative AI config flow options handler."""
|
||||
|
||||
last_rendered_recommended = False
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.last_rendered_recommended = config_entry.options.get(
|
||||
CONF_RECOMMENDED, False
|
||||
)
|
||||
self._genai_client = config_entry.runtime_data
|
||||
|
||||
@property
|
||||
def _genai_client(self) -> genai.Client:
|
||||
"""Return the Google Generative AI client."""
|
||||
return self._get_entry().runtime_data
|
||||
|
||||
@property
|
||||
def _is_new(self) -> bool:
|
||||
"""Return if this is a new subentry."""
|
||||
return self.source == "user"
|
||||
|
||||
async def async_step_set_options(
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Set conversation options."""
|
||||
# abort if entry is not loaded
|
||||
if self._get_entry().state != ConfigEntryState.LOADED:
|
||||
return self.async_abort(reason="entry_not_loaded")
|
||||
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is None:
|
||||
if self._is_new:
|
||||
options = RECOMMENDED_OPTIONS.copy()
|
||||
else:
|
||||
# If this is a reconfiguration, we need to copy the existing options
|
||||
# so that we can show the current values in the form.
|
||||
options = self._get_reconfigure_subentry().data.copy()
|
||||
|
||||
self.last_rendered_recommended = cast(
|
||||
bool, options.get(CONF_RECOMMENDED, False)
|
||||
)
|
||||
|
||||
else:
|
||||
if user_input is not None:
|
||||
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if not user_input.get(CONF_LLM_HASS_API):
|
||||
user_input.pop(CONF_LLM_HASS_API, None)
|
||||
# Don't allow to save options that enable the Google Seearch tool with an Assist API
|
||||
if not (
|
||||
user_input.get(CONF_LLM_HASS_API)
|
||||
and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
|
||||
):
|
||||
if self._is_new:
|
||||
return self.async_create_entry(
|
||||
title=user_input.pop(CONF_NAME),
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_update_and_abort(
|
||||
self._get_entry(),
|
||||
self._get_reconfigure_subentry(),
|
||||
data=user_input,
|
||||
)
|
||||
# Don't allow to save options that enable the Google Seearch tool with an Assist API
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
errors[CONF_USE_GOOGLE_SEARCH_TOOL] = "invalid_google_search_option"
|
||||
|
||||
# Re-render the options again, now with the recommended options shown/hidden
|
||||
@@ -240,19 +199,15 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
options = user_input
|
||||
|
||||
schema = await google_generative_ai_config_option_schema(
|
||||
self.hass, self._is_new, options, self._genai_client
|
||||
self.hass, options, self._genai_client
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="set_options", data_schema=vol.Schema(schema), errors=errors
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
)
|
||||
|
||||
async_step_reconfigure = async_step_set_options
|
||||
async_step_user = async_step_set_options
|
||||
|
||||
|
||||
async def google_generative_ai_config_option_schema(
|
||||
hass: HomeAssistant,
|
||||
is_new: bool,
|
||||
options: Mapping[str, Any],
|
||||
genai_client: genai.Client,
|
||||
) -> dict:
|
||||
@@ -269,32 +224,23 @@ async def google_generative_ai_config_option_schema(
|
||||
):
|
||||
suggested_llm_apis = [suggested_llm_apis]
|
||||
|
||||
if is_new:
|
||||
schema: dict[vol.Required | vol.Optional, Any] = {
|
||||
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
|
||||
}
|
||||
else:
|
||||
schema = {}
|
||||
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PROMPT,
|
||||
description={
|
||||
"suggested_value": options.get(
|
||||
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
)
|
||||
},
|
||||
): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
description={"suggested_value": suggested_llm_apis},
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
schema = {
|
||||
vol.Optional(
|
||||
CONF_PROMPT,
|
||||
description={
|
||||
"suggested_value": options.get(
|
||||
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
)
|
||||
},
|
||||
): TemplateSelector(),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
description={"suggested_value": suggested_llm_apis},
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
}
|
||||
|
||||
if options.get(CONF_RECOMMENDED):
|
||||
return schema
|
||||
|
||||
@@ -6,12 +6,10 @@ DOMAIN = "google_generative_ai_conversation"
LOGGER = logging.getLogger(__package__)
CONF_PROMPT = "prompt"

DEFAULT_CONVERSATION_NAME = "Google AI Conversation"

ATTR_MODEL = "model"
CONF_RECOMMENDED = "recommended"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0

@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import assist_pipeline, conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -22,14 +22,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up conversation entities."""
|
||||
for subentry in config_entry.subentries.values():
|
||||
if subentry.subentry_type != "conversation":
|
||||
continue
|
||||
|
||||
async_add_entities(
|
||||
[GoogleGenerativeAIConversationEntity(config_entry, subentry)],
|
||||
config_subentry_id=subentry.subentry_id,
|
||||
)
|
||||
agent = GoogleGenerativeAIConversationEntity(config_entry)
|
||||
async_add_entities([agent])
|
||||
|
||||
|
||||
class GoogleGenerativeAIConversationEntity(
|
||||
@@ -41,10 +35,10 @@ class GoogleGenerativeAIConversationEntity(
|
||||
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: ConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
super().__init__(entry, subentry)
|
||||
if self.subentry.data.get(CONF_LLM_HASS_API):
|
||||
super().__init__(entry)
|
||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
)
|
||||
@@ -76,7 +70,7 @@ class GoogleGenerativeAIConversationEntity(
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> conversation.ConversationResult:
|
||||
"""Call the API."""
|
||||
options = self.subentry.data
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_provide_llm_data(
|
||||
|
||||
@@ -24,7 +24,7 @@ from google.genai.types import (
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -301,16 +301,17 @@ async def _transform_stream(
|
||||
class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Google Generative AI base entity."""
|
||||
|
||||
def __init__(self, entry: ConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
@@ -321,7 +322,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
|
||||
@@ -18,49 +18,35 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"config_subentries": {
|
||||
"conversation": {
|
||||
"initiate_flow": {
|
||||
"user": "Add conversation agent",
|
||||
"reconfigure": "Reconfigure conversation agent"
|
||||
},
|
||||
"entry_type": "Conversation agent",
|
||||
|
||||
"step": {
|
||||
"set_options": {
|
||||
"data": {
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"recommended": "Recommended model settings",
|
||||
"prompt": "Instructions",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"temperature": "Temperature",
|
||||
"top_p": "Top P",
|
||||
"top_k": "Top K",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||
"harassment_block_threshold": "Negative or harmful comments targeting identity and/or protected attributes",
|
||||
"hate_block_threshold": "Content that is rude, disrespectful, or profane",
|
||||
"sexual_block_threshold": "Contains references to sexual acts or other lewd content",
|
||||
"dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts",
|
||||
"enable_google_search_tool": "Enable Google Search tool"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"enable_google_search_tool": "Only works if there is nothing selected in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
|
||||
}
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"recommended": "Recommended model settings",
|
||||
"prompt": "Instructions",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"temperature": "Temperature",
|
||||
"top_p": "Top P",
|
||||
"top_k": "Top K",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
|
||||
"harassment_block_threshold": "Negative or harmful comments targeting identity and/or protected attributes",
|
||||
"hate_block_threshold": "Content that is rude, disrespectful, or profane",
|
||||
"sexual_block_threshold": "Contains references to sexual acts or other lewd content",
|
||||
"dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts",
|
||||
"enable_google_search_tool": "Enable Google Search tool"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"enable_google_search_tool": "Only works if there is nothing selected in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"entry_not_loaded": "Cannot add things while the configuration is disabled.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -113,6 +113,7 @@ class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
self._attr_unique_id = f"{entry.entry_id}_tts"
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
name=entry.title,
manufacturer="Google",
model="Generative AI",
entry_type=dr.DeviceEntryType.SERVICE,

@@ -12,6 +12,7 @@ import re
|
||||
import struct
|
||||
from typing import Any, NamedTuple
|
||||
|
||||
import aiofiles
|
||||
from aiohasupervisor import SupervisorError
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -92,7 +93,6 @@ from .const import (
|
||||
ATTR_LOCATION,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_SLUG,
|
||||
COORDINATOR,
|
||||
DATA_COMPONENT,
|
||||
DATA_CONFIG_STORE,
|
||||
DATA_CORE_INFO,
|
||||
@@ -107,7 +107,6 @@ from .const import (
|
||||
HASSIO_UPDATE_INTERVAL,
|
||||
)
|
||||
from .coordinator import (
|
||||
HassioAddOnDataUpdateCoordinator,
|
||||
HassioDataUpdateCoordinator,
|
||||
get_addons_info,
|
||||
get_addons_stats, # noqa: F401
|
||||
@@ -240,6 +239,12 @@ def _is_32_bit() -> bool:
|
||||
return size * 8 == 32
|
||||
|
||||
|
||||
async def _get_arch() -> str:
|
||||
async with aiofiles.open("/etc/apk/arch") as arch_file:
|
||||
raw_arch = await arch_file.read()
|
||||
return {"x86": "i386"}.get(raw_arch, raw_arch)
|
||||
|
||||
|
||||
class APIEndpointSettings(NamedTuple):
|
||||
"""Settings for API endpoint."""
|
||||
|
||||
@@ -557,14 +562,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
hass.data[COORDINATOR] = coordinator
|
||||
hass.data[ADDONS_COORDINATOR] = coordinator
|
||||
|
||||
addon_coordinator = HassioAddOnDataUpdateCoordinator(hass, entry, dev_reg)
|
||||
await addon_coordinator.async_config_entry_first_refresh()
|
||||
hass.data[ADDONS_COORDINATOR] = addon_coordinator
|
||||
arch = await _get_arch()
|
||||
|
||||
def deprecated_setup_issue() -> None:
|
||||
os_info = get_os_info(hass)
|
||||
@@ -573,7 +575,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return
|
||||
is_haos = info.get("hassos") is not None
|
||||
board = os_info.get("board")
|
||||
arch = info.get("arch", "unknown")
|
||||
unsupported_board = board in {"tinker", "odroid-xu4", "rpi2"}
|
||||
unsupported_os_on_board = board in {"rpi3", "rpi4"}
|
||||
if is_haos and (unsupported_board or unsupported_os_on_board):
|
||||
|
||||
@@ -41,15 +41,15 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Binary sensor set up for Hass.io config entry."""
|
||||
addons_coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=addons_coordinator,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -71,7 +71,6 @@ EVENT_ISSUE_REMOVED = "issue_removed"
|
||||
|
||||
UPDATE_KEY_SUPERVISOR = "supervisor"
|
||||
|
||||
COORDINATOR = "hassio_coordinator"
|
||||
ADDONS_COORDINATOR = "hassio_addons_coordinator"
|
||||
|
||||
|
||||
@@ -86,11 +85,9 @@ DATA_OS_INFO = "hassio_os_info"
|
||||
DATA_NETWORK_INFO = "hassio_network_info"
|
||||
DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
|
||||
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
|
||||
DATA_ADDONS = "hassio_addons"
|
||||
DATA_ADDONS_INFO = "hassio_addons_info"
|
||||
DATA_ADDONS_STATS = "hassio_addons_stats"
|
||||
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
ATTR_AUTO_UPDATE = "auto_update"
|
||||
ATTR_VERSION = "version"
|
||||
|
||||
@@ -30,7 +30,6 @@ from .const import (
|
||||
CONTAINER_INFO,
|
||||
CONTAINER_STATS,
|
||||
CORE_CONTAINER,
|
||||
DATA_ADDONS,
|
||||
DATA_ADDONS_INFO,
|
||||
DATA_ADDONS_STATS,
|
||||
DATA_COMPONENT,
|
||||
@@ -50,7 +49,6 @@ from .const import (
|
||||
DATA_SUPERVISOR_INFO,
|
||||
DATA_SUPERVISOR_STATS,
|
||||
DOMAIN,
|
||||
HASSIO_ADDON_UPDATE_INTERVAL,
|
||||
HASSIO_UPDATE_INTERVAL,
|
||||
REQUEST_REFRESH_DELAY,
|
||||
SUPERVISOR_CONTAINER,
|
||||
@@ -114,16 +112,6 @@ def get_network_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
return hass.data.get(DATA_NETWORK_INFO)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_addons(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return Addons info.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
return hass.data.get(DATA_ADDONS)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any]] | None:
|
||||
@@ -291,8 +279,8 @@ def async_remove_addons_from_dev_reg(
|
||||
dev_reg.async_remove_device(dev.id)
|
||||
|
||||
|
||||
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to retrieve Hass.io Add-on status."""
|
||||
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to retrieve Hass.io status."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
@@ -305,7 +293,7 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
|
||||
update_interval=HASSIO_UPDATE_INTERVAL,
|
||||
# We don't want an immediate refresh since we want to avoid
|
||||
# fetching the container stats right away and avoid hammering
|
||||
# the Supervisor API on startup
|
||||
@@ -317,6 +305,7 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
self.data = {}
|
||||
self.entry_id = config_entry.entry_id
|
||||
self.dev_reg = dev_reg
|
||||
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
|
||||
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
|
||||
lambda: defaultdict(set)
|
||||
)
|
||||
@@ -332,7 +321,7 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
|
||||
|
||||
new_data: dict[str, Any] = {}
|
||||
addons = get_addons(self.hass) or {}
|
||||
supervisor_info = get_supervisor_info(self.hass) or {}
|
||||
addons_info = get_addons_info(self.hass) or {}
|
||||
addons_stats = get_addons_stats(self.hass)
|
||||
store_data = get_store(self.hass)
|
||||
@@ -356,14 +345,37 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
addon.get(ATTR_REPOSITORY), addon.get(ATTR_REPOSITORY, "")
|
||||
),
|
||||
}
|
||||
for addon in addons.get("addons", [])
|
||||
for addon in supervisor_info.get("addons", [])
|
||||
}
|
||||
if self.is_hass_os:
|
||||
new_data[DATA_KEY_OS] = get_os_info(self.hass)
|
||||
|
||||
new_data[DATA_KEY_CORE] = {
|
||||
**(get_core_info(self.hass) or {}),
|
||||
**get_core_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_SUPERVISOR] = {
|
||||
**supervisor_info,
|
||||
**get_supervisor_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
|
||||
|
||||
# If this is the initial refresh, register all addons and return the dict
|
||||
if is_first_update:
|
||||
async_register_addons_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
async_register_core_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
|
||||
)
|
||||
async_register_supervisor_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
|
||||
)
|
||||
async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
|
||||
if self.is_hass_os:
|
||||
async_register_os_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
|
||||
)
|
||||
|
||||
# Remove add-ons that are no longer installed from device registry
|
||||
supervisor_addon_devices = {
|
||||
@@ -376,6 +388,12 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
|
||||
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
|
||||
|
||||
if not self.is_hass_os and (
|
||||
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
|
||||
):
|
||||
# Remove the OS device if it exists and the installation is not hassos
|
||||
self.dev_reg.async_remove_device(dev.id)
|
||||
|
||||
# If there are new add-ons, we should reload the config entry so we can
|
||||
# create new devices and entities. We can return an empty dict because
|
||||
# coordinator will be recreated.
|
||||
@@ -401,9 +419,23 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
container_updates = self._container_updates
|
||||
|
||||
data = self.hass.data
|
||||
data[DATA_ADDONS] = await self.hassio.get_addons()
|
||||
hassio = self.hassio
|
||||
updates = {
|
||||
DATA_INFO: hassio.get_info(),
|
||||
DATA_CORE_INFO: hassio.get_core_info(),
|
||||
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
|
||||
DATA_OS_INFO: hassio.get_os_info(),
|
||||
}
|
||||
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
|
||||
updates[DATA_CORE_STATS] = hassio.get_core_stats()
|
||||
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
|
||||
updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
|
||||
|
||||
_addon_data = data[DATA_ADDONS].get("addons", [])
|
||||
results = await asyncio.gather(*updates.values())
|
||||
for key, result in zip(updates, results, strict=False):
|
||||
data[key] = result
|
||||
|
||||
_addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])
|
||||
all_addons: list[str] = []
|
||||
started_addons: list[str] = []
|
||||
for addon in _addon_data:
|
||||
@@ -499,159 +531,14 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
) -> None:
|
||||
"""Refresh data."""
|
||||
if not scheduled and not raise_on_auth_failed:
|
||||
# Force reloading add-on updates for non-scheduled
|
||||
# updates.
|
||||
#
|
||||
# Force refreshing updates for non-scheduled updates
|
||||
# If `raise_on_auth_failed` is set, it means this is
|
||||
# the first refresh and we do not want to delay
|
||||
# startup or cause a timeout so we only refresh the
|
||||
# updates if this is not a scheduled refresh and
|
||||
# we are not doing the first refresh.
|
||||
try:
|
||||
await self.supervisor_client.store.reload()
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning("Error on Supervisor API: %s", err)
|
||||
|
||||
await super()._async_refresh(
|
||||
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
|
||||
)
|
||||
|
||||
|
||||
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to retrieve Hass.io status."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: ConfigEntry, dev_reg: dr.DeviceRegistry
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=HASSIO_UPDATE_INTERVAL,
|
||||
# We don't want an immediate refresh since we want to avoid
|
||||
# fetching the container stats right away and avoid hammering
|
||||
# the Supervisor API on startup
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
|
||||
),
|
||||
)
|
||||
self.hassio = hass.data[DATA_COMPONENT]
|
||||
self.data = {}
|
||||
self.entry_id = config_entry.entry_id
|
||||
self.dev_reg = dev_reg
|
||||
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
|
||||
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
|
||||
lambda: defaultdict(set)
|
||||
)
|
||||
self.supervisor_client = get_supervisor_client(hass)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Update data via library."""
|
||||
is_first_update = not self.data
|
||||
|
||||
try:
|
||||
await self.force_data_refresh(is_first_update)
|
||||
except HassioAPIError as err:
|
||||
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
|
||||
|
||||
new_data: dict[str, Any] = {}
|
||||
supervisor_info = get_supervisor_info(self.hass) or {}
|
||||
|
||||
if self.is_hass_os:
|
||||
new_data[DATA_KEY_OS] = get_os_info(self.hass)
|
||||
|
||||
new_data[DATA_KEY_CORE] = {
|
||||
**(get_core_info(self.hass) or {}),
|
||||
**get_core_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_SUPERVISOR] = {
|
||||
**supervisor_info,
|
||||
**get_supervisor_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
|
||||
|
||||
# If this is the initial refresh, register all main components
|
||||
if is_first_update:
|
||||
async_register_core_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
|
||||
)
|
||||
async_register_supervisor_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
|
||||
)
|
||||
async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
|
||||
if self.is_hass_os:
|
||||
async_register_os_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
|
||||
)
|
||||
|
||||
if not self.is_hass_os and (
|
||||
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
|
||||
):
|
||||
# Remove the OS device if it exists and the installation is not hassos
|
||||
self.dev_reg.async_remove_device(dev.id)
|
||||
|
||||
return new_data
|
||||
|
||||
async def force_data_refresh(self, first_update: bool) -> None:
|
||||
"""Force update of the addon info."""
|
||||
container_updates = self._container_updates
|
||||
|
||||
data = self.hass.data
|
||||
hassio = self.hassio
|
||||
updates = {
|
||||
DATA_INFO: hassio.get_info(),
|
||||
DATA_CORE_INFO: hassio.get_core_info(),
|
||||
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
|
||||
DATA_OS_INFO: hassio.get_os_info(),
|
||||
}
|
||||
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
|
||||
updates[DATA_CORE_STATS] = hassio.get_core_stats()
|
||||
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
|
||||
updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
|
||||
|
||||
results = await asyncio.gather(*updates.values())
|
||||
for key, result in zip(updates, results, strict=False):
|
||||
data[key] = result
|
||||
|
||||
@callback
|
||||
def async_enable_container_updates(
|
||||
self, slug: str, entity_id: str, types: set[str]
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Enable updates for an add-on."""
|
||||
enabled_updates = self._container_updates[slug]
|
||||
for key in types:
|
||||
enabled_updates[key].add(entity_id)
|
||||
|
||||
@callback
|
||||
def _remove() -> None:
|
||||
for key in types:
|
||||
enabled_updates[key].remove(entity_id)
|
||||
|
||||
return _remove
|
||||
|
||||
async def _async_refresh(
|
||||
self,
|
||||
log_failures: bool = True,
|
||||
raise_on_auth_failed: bool = False,
|
||||
scheduled: bool = False,
|
||||
raise_on_entry_error: bool = False,
|
||||
) -> None:
|
||||
"""Refresh data."""
|
||||
if not scheduled and not raise_on_auth_failed:
|
||||
# Force reloading updates of main components for
|
||||
# non-scheduled updates.
|
||||
#
|
||||
# If `raise_on_auth_failed` is set, it means this is
|
||||
# the first refresh and we do not want to delay
|
||||
# startup or cause a timeout so we only refresh the
|
||||
# updates if this is not a scheduled refresh and
|
||||
# we are not doing the first refresh.
|
||||
try:
|
||||
await self.supervisor_client.reload_updates()
|
||||
await self.supervisor_client.refresh_updates()
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning("Error on Supervisor API: %s", err)
|
||||
|
||||
|
||||
@@ -10,8 +10,8 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from .const import ADDONS_COORDINATOR, COORDINATOR
|
||||
from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator
|
||||
from .const import ADDONS_COORDINATOR
|
||||
from .coordinator import HassioDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -19,8 +19,7 @@ async def async_get_config_entry_diagnostics(
|
||||
config_entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR]
|
||||
addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
|
||||
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
|
||||
device_registry = dr.async_get(hass)
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
@@ -51,6 +50,5 @@ async def async_get_config_entry_diagnostics(
|
||||
|
||||
return {
|
||||
"coordinator_data": coordinator.data,
|
||||
"addons_coordinator_data": addons_coordinator.data,
|
||||
"devices": devices,
|
||||
}
|
||||
|
||||
@@ -21,17 +21,17 @@ from .const import (
|
||||
KEY_TO_UPDATE_TYPES,
|
||||
SUPERVISOR_CONTAINER,
|
||||
)
|
||||
from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator
|
||||
from .coordinator import HassioDataUpdateCoordinator
|
||||
|
||||
|
||||
class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
|
||||
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
"""Base entity for a Hass.io add-on."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HassioAddOnDataUpdateCoordinator,
|
||||
coordinator: HassioDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
addon: dict[str, Any],
|
||||
) -> None:
|
||||
|
||||
@@ -226,14 +226,6 @@ class HassIO:
|
||||
"""
|
||||
return self.send_command("/ingress/panels", method="get")
|
||||
|
||||
@api_data
|
||||
def get_addons(self) -> Coroutine:
|
||||
"""Return data installed Add-ons.
|
||||
|
||||
This method returns a coroutine.
|
||||
"""
|
||||
return self.send_command("/addons", method="get")
|
||||
|
||||
@_api_bool
|
||||
async def update_hass_api(
|
||||
self, http_config: dict[str, Any], refresh_token: RefreshToken
|
||||
|
||||
@@ -19,7 +19,6 @@ from .const import (
|
||||
ATTR_MEMORY_PERCENT,
|
||||
ATTR_VERSION,
|
||||
ATTR_VERSION_LATEST,
|
||||
COORDINATOR,
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
@@ -115,21 +114,20 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Sensor set up for Hass.io config entry."""
|
||||
addons_coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
entities: list[
|
||||
HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
|
||||
] = [
|
||||
HassioAddonSensor(
|
||||
addon=addon,
|
||||
coordinator=addons_coordinator,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
]
|
||||
|
||||
coordinator = hass.data[COORDINATOR]
|
||||
entities.extend(
|
||||
CoreSensor(
|
||||
coordinator=coordinator,
|
||||
|
||||
@@ -24,7 +24,6 @@ from .const import (
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_VERSION,
|
||||
ATTR_VERSION_LATEST,
|
||||
COORDINATOR,
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_OS,
|
||||
@@ -50,9 +49,9 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Supervisor update based on a config entry."""
|
||||
coordinator = hass.data[COORDINATOR]
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
entities: list[UpdateEntity] = [
|
||||
entities = [
|
||||
SupervisorSupervisorUpdateEntity(
|
||||
coordinator=coordinator,
|
||||
entity_description=ENTITY_DESCRIPTION,
|
||||
@@ -63,6 +62,15 @@ async def async_setup_entry(
|
||||
),
|
||||
]
|
||||
|
||||
entities.extend(
|
||||
SupervisorAddonUpdateEntity(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=ENTITY_DESCRIPTION,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
|
||||
if coordinator.is_hass_os:
|
||||
entities.append(
|
||||
SupervisorOSUpdateEntity(
|
||||
@@ -71,16 +79,6 @@ async def async_setup_entry(
|
||||
)
|
||||
)
|
||||
|
||||
addons_coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
entities.extend(
|
||||
SupervisorAddonUpdateEntity(
|
||||
addon=addon,
|
||||
coordinator=addons_coordinator,
|
||||
entity_description=ENTITY_DESCRIPTION,
|
||||
)
|
||||
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import logging
|
||||
import struct
|
||||
from typing import Any
|
||||
|
||||
import aiofiles
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config as conf_util, core_config
|
||||
@@ -17,7 +18,6 @@ from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
RESTART_EXIT_CODE,
|
||||
SERVICE_RELOAD,
|
||||
SERVICE_SAVE_PERSISTENT_STATES,
|
||||
@@ -26,7 +26,6 @@ from homeassistant.const import (
|
||||
SERVICE_TURN_ON,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
Event,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
@@ -102,6 +101,12 @@ def _is_32_bit() -> bool:
|
||||
return size * 8 == 32
|
||||
|
||||
|
||||
async def _get_arch() -> str:
|
||||
async with aiofiles.open("/etc/apk/arch") as arch_file:
|
||||
raw_arch = (await arch_file.read()).strip()
|
||||
return {"x86": "i386", "x86_64": "amd64"}.get(raw_arch, raw_arch)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901
|
||||
"""Set up general services related to Home Assistant."""
|
||||
|
||||
@@ -406,50 +411,45 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
|
||||
hass.data[DATA_EXPOSED_ENTITIES] = exposed_entities
|
||||
async_set_stop_handler(hass, _async_stop)
|
||||
|
||||
async def _async_check_deprecation(event: Event) -> None:
|
||||
"""Check and create deprecation issues after startup."""
|
||||
info = await async_get_system_info(hass)
|
||||
info = await async_get_system_info(hass)
|
||||
|
||||
installation_type = info["installation_type"][15:]
|
||||
if installation_type in {"Core", "Container"}:
|
||||
deprecated_method = installation_type == "Core"
|
||||
bit32 = _is_32_bit()
|
||||
arch = info["arch"]
|
||||
if bit32 and installation_type == "Container":
|
||||
arch = info.get("container_arch", arch)
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_container",
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_container",
|
||||
translation_placeholders={"arch": arch},
|
||||
)
|
||||
deprecated_architecture = bit32 and installation_type != "Container"
|
||||
if deprecated_method or deprecated_architecture:
|
||||
issue_id = "deprecated"
|
||||
if deprecated_method:
|
||||
issue_id += "_method"
|
||||
if deprecated_architecture:
|
||||
issue_id += "_architecture"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_type": installation_type,
|
||||
"arch": arch,
|
||||
},
|
||||
)
|
||||
|
||||
# Delay deprecation check to make sure installation method is determined correctly
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_check_deprecation)
|
||||
installation_type = info["installation_type"][15:]
|
||||
if installation_type in {"Core", "Container"}:
|
||||
deprecated_method = installation_type == "Core"
|
||||
bit32 = _is_32_bit()
|
||||
arch = info["arch"]
|
||||
if bit32 and installation_type == "Container":
|
||||
arch = await _get_arch()
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_container",
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_container",
|
||||
translation_placeholders={"arch": arch},
|
||||
)
|
||||
deprecated_architecture = bit32 and installation_type != "Container"
|
||||
if deprecated_method or deprecated_architecture:
|
||||
issue_id = "deprecated"
|
||||
if deprecated_method:
|
||||
issue_id += "_method"
|
||||
if deprecated_architecture:
|
||||
issue_id += "_architecture"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_type": installation_type,
|
||||
"arch": arch,
|
||||
},
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -124,7 +124,6 @@
|
||||
"info": {
|
||||
"arch": "CPU architecture",
|
||||
"config_dir": "Configuration directory",
|
||||
"container_arch": "Container architecture",
|
||||
"dev": "Development",
|
||||
"docker": "Docker",
|
||||
"hassio": "Supervisor",
|
||||
|
||||
@@ -27,7 +27,6 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
"dev": info.get("dev"),
|
||||
"hassio": info.get("hassio"),
|
||||
"docker": info.get("docker"),
|
||||
"container_arch": info.get("container_arch"),
|
||||
"user": info.get("user"),
|
||||
"virtualenv": info.get("virtualenv"),
|
||||
"python_version": info.get("python_version"),
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
|
||||
"integration_type": "system",
|
||||
"requirements": [
|
||||
"universal-silabs-flasher==0.0.31",
|
||||
"universal-silabs-flasher==0.0.30",
|
||||
"ha-silabs-firmware-client==0.2.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -129,6 +129,8 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
): str
|
||||
}
|
||||
),
|
||||
description_placeholders={"name": str(reconfigure_entry.unique_id)},
|
||||
description_placeholders={
|
||||
"name": reconfigure_entry.runtime_data.settings.uid
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -12,6 +12,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["homewizard_energy"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-homewizard-energy==9.2.0"],
|
||||
"requirements": ["python-homewizard-energy==9.1.1"],
|
||||
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
"authorize": {
|
||||
"title": "Authorize",
|
||||
"description": "Press the button on the HomeWizard Energy device for two seconds, then select the button below."
|
||||
"description": "Press the button on the HomeWizard Energy device, then select the button below."
|
||||
},
|
||||
"reconfigure": {
|
||||
"description": "Update configuration for {title}.",
|
||||
|
||||
@@ -45,9 +45,7 @@ class ImgwPibFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
imgwpib = await ImgwPib.create(
|
||||
client_session,
|
||||
hydrological_station_id=station_id,
|
||||
hydrological_details=False,
|
||||
client_session, hydrological_station_id=station_id
|
||||
)
|
||||
hydrological_data = await imgwpib.get_hydrological_data()
|
||||
except (ClientError, TimeoutError, ApiError):
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/imgw_pib",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["imgw_pib==1.1.0"]
|
||||
"requirements": ["imgw_pib==1.0.10"]
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import ImmichConfigEntry, ImmichDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.UPDATE]
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ImmichConfigEntry) -> bool:
|
||||
@@ -33,7 +33,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ImmichConfigEntry) -> bo
|
||||
entry.data[CONF_HOST],
|
||||
entry.data[CONF_PORT],
|
||||
entry.data[CONF_SSL],
|
||||
"home-assistant",
|
||||
)
|
||||
|
||||
try:
|
||||
|
||||
@@ -13,9 +13,7 @@ from aioimmich.server.models import (
|
||||
ImmichServerAbout,
|
||||
ImmichServerStatistics,
|
||||
ImmichServerStorage,
|
||||
ImmichServerVersionCheck,
|
||||
)
|
||||
from awesomeversion import AwesomeVersion
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SSL
|
||||
@@ -35,7 +33,6 @@ class ImmichData:
|
||||
server_about: ImmichServerAbout
|
||||
server_storage: ImmichServerStorage
|
||||
server_usage: ImmichServerStatistics | None
|
||||
server_version_check: ImmichServerVersionCheck | None
|
||||
|
||||
|
||||
type ImmichConfigEntry = ConfigEntry[ImmichDataUpdateCoordinator]
|
||||
@@ -74,16 +71,9 @@ class ImmichDataUpdateCoordinator(DataUpdateCoordinator[ImmichData]):
|
||||
if self.is_admin
|
||||
else None
|
||||
)
|
||||
server_version_check = (
|
||||
await self.api.server.async_get_version_check()
|
||||
if AwesomeVersion(server_about.version) >= AwesomeVersion("v1.134.0")
|
||||
else None
|
||||
)
|
||||
except ImmichUnauthorizedError as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except CONNECT_ERRORS as err:
|
||||
raise UpdateFailed from err
|
||||
|
||||
return ImmichData(
|
||||
server_about, server_storage, server_usage, server_version_check
|
||||
)
|
||||
return ImmichData(server_about, server_storage, server_usage)
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioimmich"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioimmich==0.10.1"]
|
||||
"requirements": ["aioimmich==0.9.1"]
|
||||
}
|
||||
|
||||
@@ -68,11 +68,6 @@
|
||||
"usage_by_videos": {
|
||||
"name": "Disk used by videos"
|
||||
}
|
||||
},
|
||||
"update": {
|
||||
"update": {
|
||||
"name": "Version"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
"""Update platform for the Immich integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.update import UpdateEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ImmichConfigEntry, ImmichDataUpdateCoordinator
|
||||
from .entity import ImmichEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ImmichConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add immich server update entity."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
if coordinator.data.server_version_check is not None:
|
||||
async_add_entities([ImmichUpdateEntity(coordinator)])
|
||||
|
||||
|
||||
class ImmichUpdateEntity(ImmichEntity, UpdateEntity):
|
||||
"""Define Immich update entity."""
|
||||
|
||||
_attr_translation_key = "update"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ImmichDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_update"
|
||||
|
||||
@property
|
||||
def installed_version(self) -> str:
|
||||
"""Current installed immich server version."""
|
||||
return self.coordinator.data.server_about.version
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str:
|
||||
"""Available new immich server version."""
|
||||
assert self.coordinator.data.server_version_check
|
||||
return self.coordinator.data.server_version_check.release_version
|
||||
|
||||
@property
|
||||
def release_url(self) -> str | None:
|
||||
"""URL to the full release notes of the new immich server version."""
|
||||
return (
|
||||
f"https://github.com/immich-app/immich/releases/tag/{self.latest_version}"
|
||||
)
|
||||
@@ -1,36 +1,95 @@
|
||||
"""The JuiceNet integration."""
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
import logging
|
||||
|
||||
from .const import DOMAIN
|
||||
import aiohttp
|
||||
from pyjuicenet import Api, TokenError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
|
||||
from .coordinator import JuiceNetCoordinator
|
||||
from .device import JuiceNetApi
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
vol.All(
|
||||
cv.deprecated(DOMAIN),
|
||||
{DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})},
|
||||
),
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the JuiceNet component."""
|
||||
conf = config.get(DOMAIN)
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
|
||||
if not conf:
|
||||
return True
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
|
||||
)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up JuiceNet from a config entry."""
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
DOMAIN,
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key="integration_removed",
|
||||
translation_placeholders={
|
||||
"entries": "/config/integrations/integration/juicenet",
|
||||
},
|
||||
)
|
||||
config = entry.data
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
access_token = config[CONF_ACCESS_TOKEN]
|
||||
api = Api(access_token, session)
|
||||
|
||||
juicenet = JuiceNetApi(api)
|
||||
|
||||
try:
|
||||
await juicenet.setup()
|
||||
except TokenError as error:
|
||||
_LOGGER.error("JuiceNet Error %s", error)
|
||||
return False
|
||||
except aiohttp.ClientError as error:
|
||||
_LOGGER.error("Could not reach the JuiceNet API %s", error)
|
||||
raise ConfigEntryNotReady from error
|
||||
|
||||
if not juicenet.devices:
|
||||
_LOGGER.error("No JuiceNet devices found for this account")
|
||||
return False
|
||||
_LOGGER.debug("%d JuiceNet device(s) found", len(juicenet.devices))
|
||||
|
||||
coordinator = JuiceNetCoordinator(hass, entry, juicenet)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data[DOMAIN][entry.entry_id] = {
|
||||
JUICENET_API: juicenet,
|
||||
JUICENET_COORDINATOR: coordinator,
|
||||
}
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if all(
|
||||
config_entry.state is ConfigEntryState.NOT_LOADED
|
||||
for config_entry in hass.config_entries.async_entries(DOMAIN)
|
||||
if config_entry.entry_id != entry.entry_id
|
||||
):
|
||||
ir.async_delete_issue(hass, DOMAIN, DOMAIN)
|
||||
|
||||
return True
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return unload_ok
|
||||
|
||||
@@ -1,11 +1,82 @@
|
||||
"""Config flow for JuiceNet integration."""
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
from pyjuicenet import Api, TokenError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import core, exceptions
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
|
||||
|
||||
|
||||
async def validate_input(hass: core.HomeAssistant, data):
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
session = async_get_clientsession(hass)
|
||||
juicenet = Api(data[CONF_ACCESS_TOKEN], session)
|
||||
|
||||
try:
|
||||
await juicenet.get_devices()
|
||||
except TokenError as error:
|
||||
_LOGGER.error("Token Error %s", error)
|
||||
raise InvalidAuth from error
|
||||
except aiohttp.ClientError as error:
|
||||
_LOGGER.error("Error connecting %s", error)
|
||||
raise CannotConnect from error
|
||||
|
||||
# Return info that you want to store in the config entry.
|
||||
return {"title": "JuiceNet"}
|
||||
|
||||
|
||||
class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for JuiceNet."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
await self.async_set_unique_id(user_input[CONF_ACCESS_TOKEN])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
return self.async_create_entry(title=info["title"], data=user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import."""
|
||||
return await self.async_step_user(import_data)
|
||||
|
||||
|
||||
class CannotConnect(exceptions.HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
|
||||
class InvalidAuth(exceptions.HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
|
||||
@@ -1,3 +1,6 @@
"""Constants used by the JuiceNet component."""

DOMAIN = "juicenet"

JUICENET_API = "juicenet_api"
JUICENET_COORDINATOR = "juicenet_coordinator"

homeassistant/components/juicenet/coordinator.py (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
"""The JuiceNet integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .device import JuiceNetApi
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JuiceNetCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Coordinator for JuiceNet."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry, juicenet_api: JuiceNetApi
|
||||
) -> None:
|
||||
"""Initialize the JuiceNet coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name="JuiceNet",
|
||||
update_interval=timedelta(seconds=30),
|
||||
)
|
||||
self.juicenet_api = juicenet_api
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
for device in self.juicenet_api.devices:
|
||||
await device.update_state(True)
|
||||
homeassistant/components/juicenet/device.py (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
"""Adapter to wrap the pyjuicenet api for home assistant."""
|
||||
|
||||
from pyjuicenet import Api, Charger
|
||||
|
||||
|
||||
class JuiceNetApi:
|
||||
"""Represent a connection to JuiceNet."""
|
||||
|
||||
def __init__(self, api: Api) -> None:
|
||||
"""Create an object from the provided API instance."""
|
||||
self.api = api
|
||||
self._devices: list[Charger] = []
|
||||
|
||||
async def setup(self) -> None:
|
||||
"""JuiceNet device setup."""
|
||||
self._devices = await self.api.get_devices()
|
||||
|
||||
@property
|
||||
def devices(self) -> list[Charger]:
|
||||
"""Get a list of devices managed by this account."""
|
||||
return self._devices
|
||||
homeassistant/components/juicenet/entity.py (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
"""Adapter to wrap the pyjuicenet api for home assistant."""
|
||||
|
||||
from pyjuicenet import Charger
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import JuiceNetCoordinator
|
||||
|
||||
|
||||
class JuiceNetEntity(CoordinatorEntity[JuiceNetCoordinator]):
|
||||
"""Represent a base JuiceNet device."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self, device: Charger, key: str, coordinator: JuiceNetCoordinator
|
||||
) -> None:
|
||||
"""Initialise the sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.device = device
|
||||
self.key = key
|
||||
self._attr_unique_id = f"{device.id}-{key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=(
|
||||
f"https://home.juice.net/Portal/Details?unitID={device.id}"
|
||||
),
|
||||
identifiers={(DOMAIN, device.id)},
|
||||
manufacturer="JuiceNet",
|
||||
name=device.name,
|
||||
)
|
||||
@@ -1,9 +1,10 @@
{
"domain": "juicenet",
"name": "JuiceNet",
"codeowners": [],
"codeowners": ["@jesserockz"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/juicenet",
"integration_type": "system",
"iot_class": "cloud_polling",
"requirements": []
"loggers": ["pyjuicenet"],
"requirements": ["python-juicenet==1.1.0"]
}

homeassistant/components/juicenet/number.py (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
"""Support for controlling juicenet/juicepoint/juicebox based EVSE numbers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyjuicenet import Charger
|
||||
|
||||
from homeassistant.components.number import (
|
||||
DEFAULT_MAX_VALUE,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
|
||||
from .coordinator import JuiceNetCoordinator
|
||||
from .device import JuiceNetApi
|
||||
from .entity import JuiceNetEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class JuiceNetNumberEntityDescription(NumberEntityDescription):
|
||||
"""An entity description for a JuiceNetNumber."""
|
||||
|
||||
setter_key: str
|
||||
native_max_value_key: str | None = None
|
||||
|
||||
|
||||
NUMBER_TYPES: tuple[JuiceNetNumberEntityDescription, ...] = (
|
||||
JuiceNetNumberEntityDescription(
|
||||
translation_key="amperage_limit",
|
||||
key="current_charging_amperage_limit",
|
||||
native_min_value=6,
|
||||
native_max_value_key="max_charging_amperage",
|
||||
native_step=1,
|
||||
setter_key="set_charging_amperage_limit",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the JuiceNet Numbers."""
|
||||
juicenet_data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
api: JuiceNetApi = juicenet_data[JUICENET_API]
|
||||
coordinator: JuiceNetCoordinator = juicenet_data[JUICENET_COORDINATOR]
|
||||
|
||||
entities = [
|
||||
JuiceNetNumber(device, description, coordinator)
|
||||
for device in api.devices
|
||||
for description in NUMBER_TYPES
|
||||
]
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class JuiceNetNumber(JuiceNetEntity, NumberEntity):
|
||||
"""Implementation of a JuiceNet number."""
|
||||
|
||||
entity_description: JuiceNetNumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: Charger,
|
||||
description: JuiceNetNumberEntityDescription,
|
||||
coordinator: JuiceNetCoordinator,
|
||||
) -> None:
|
||||
"""Initialise the number."""
|
||||
super().__init__(device, description.key, coordinator)
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the value of the entity."""
|
||||
return getattr(self.device, self.entity_description.key, None)
|
||||
|
||||
@property
|
||||
def native_max_value(self) -> float:
|
||||
"""Return the maximum value."""
|
||||
if self.entity_description.native_max_value_key is not None:
|
||||
return getattr(self.device, self.entity_description.native_max_value_key)
|
||||
if self.entity_description.native_max_value is not None:
|
||||
return self.entity_description.native_max_value
|
||||
return DEFAULT_MAX_VALUE
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Update the current value."""
|
||||
await getattr(self.device, self.entity_description.setter_key)(value)
|
||||
homeassistant/components/juicenet/sensor.py (new file, 124 lines)
@@ -0,0 +1,124 @@
|
||||
"""Support for monitoring juicenet/juicepoint/juicebox based EVSE sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyjuicenet import Charger
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
UnitOfEnergy,
|
||||
UnitOfPower,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
|
||||
from .coordinator import JuiceNetCoordinator
|
||||
from .device import JuiceNetApi
|
||||
from .entity import JuiceNetEntity
|
||||
|
||||
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="status",
|
||||
name="Charging Status",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="voltage",
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="amps",
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="watts",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="charge_time",
|
||||
translation_key="charge_time",
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
icon="mdi:timer-outline",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_added",
|
||||
translation_key="energy_added",
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the JuiceNet Sensors."""
|
||||
juicenet_data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
api: JuiceNetApi = juicenet_data[JUICENET_API]
|
||||
coordinator: JuiceNetCoordinator = juicenet_data[JUICENET_COORDINATOR]
|
||||
|
||||
entities = [
|
||||
JuiceNetSensorDevice(device, coordinator, description)
|
||||
for device in api.devices
|
||||
for description in SENSOR_TYPES
|
||||
]
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class JuiceNetSensorDevice(JuiceNetEntity, SensorEntity):
|
||||
"""Implementation of a JuiceNet sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: Charger,
|
||||
coordinator: JuiceNetCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialise the sensor."""
|
||||
super().__init__(device, description.key, coordinator)
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon of the sensor."""
|
||||
icon = None
|
||||
if self.entity_description.key == "status":
|
||||
status = self.device.status
|
||||
if status == "standby":
|
||||
icon = "mdi:power-plug-off"
|
||||
elif status == "plugged":
|
||||
icon = "mdi:power-plug"
|
||||
elif status == "charging":
|
||||
icon = "mdi:battery-positive"
|
||||
else:
|
||||
icon = self.entity_description.icon
|
||||
return icon
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state."""
|
||||
return getattr(self.device, self.entity_description.key, None)
|
||||
@@ -1,8 +1,41 @@
|
||||
{
|
||||
"issues": {
|
||||
"integration_removed": {
|
||||
"title": "The JuiceNet integration has been removed",
|
||||
"description": "Enel X has dropped support for JuiceNet in favor of JuicePass, and the JuiceNet integration has been removed from Home Assistant as it was no longer working.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing JuiceNet integration entries]({entries})."
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]"
|
||||
},
|
||||
"description": "You will need the API Token from https://home.juice.net/Manage.",
|
||||
"title": "Connect to JuiceNet"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"number": {
|
||||
"amperage_limit": {
|
||||
"name": "Amperage limit"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"charge_time": {
|
||||
"name": "Charge time"
|
||||
},
|
||||
"energy_added": {
|
||||
"name": "Energy added"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"charge_now": {
|
||||
"name": "Charge now"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/juicenet/switch.py (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
"""Support for monitoring juicenet/juicepoint/juicebox based EVSE switches."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyjuicenet import Charger
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
|
||||
from .coordinator import JuiceNetCoordinator
|
||||
from .device import JuiceNetApi
|
||||
from .entity import JuiceNetEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the JuiceNet switches."""
|
||||
juicenet_data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
api: JuiceNetApi = juicenet_data[JUICENET_API]
|
||||
coordinator: JuiceNetCoordinator = juicenet_data[JUICENET_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
JuiceNetChargeNowSwitch(device, coordinator) for device in api.devices
|
||||
)
|
||||
|
||||
|
||||
class JuiceNetChargeNowSwitch(JuiceNetEntity, SwitchEntity):
|
||||
"""Implementation of a JuiceNet switch."""
|
||||
|
||||
_attr_translation_key = "charge_now"
|
||||
|
||||
def __init__(self, device: Charger, coordinator: JuiceNetCoordinator) -> None:
|
||||
"""Initialise the switch."""
|
||||
super().__init__(device, "charge_now", coordinator)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if switch is on."""
|
||||
return self.device.override_time != 0
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Charge now."""
|
||||
await self.device.set_override(True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Don't charge now."""
|
||||
await self.device.set_override(False)
|
||||
@@ -146,27 +146,17 @@ class KeeneticOptionsFlowHandler(OptionsFlow):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
if (
|
||||
not hasattr(self.config_entry, "runtime_data")
|
||||
or not self.config_entry.runtime_data
|
||||
):
|
||||
return self.async_abort(reason="not_initialized")
|
||||
|
||||
router = self.config_entry.runtime_data
|
||||
|
||||
try:
|
||||
interfaces: list[InterfaceInfo] = await self.hass.async_add_executor_job(
|
||||
router.client.get_interfaces
|
||||
)
|
||||
except ConnectionException:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
interfaces: list[InterfaceInfo] = await self.hass.async_add_executor_job(
|
||||
router.client.get_interfaces
|
||||
)
|
||||
|
||||
self._interface_options = {
|
||||
interface.name: (interface.description or interface.name)
|
||||
for interface in interfaces
|
||||
if interface.type.lower() == "bridge"
|
||||
}
|
||||
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_user(
|
||||
@@ -192,13 +182,9 @@ class KeeneticOptionsFlowHandler(OptionsFlow):
|
||||
): int,
|
||||
vol.Required(
|
||||
CONF_INTERFACES,
|
||||
default=[
|
||||
item
|
||||
for item in self.config_entry.options.get(
|
||||
CONF_INTERFACES, [DEFAULT_INTERFACE]
|
||||
)
|
||||
if item in self._interface_options
|
||||
],
|
||||
default=self.config_entry.options.get(
|
||||
CONF_INTERFACES, [DEFAULT_INTERFACE]
|
||||
),
|
||||
): cv.multi_select(self._interface_options),
|
||||
vol.Optional(
|
||||
CONF_TRY_HOTSPOT,
|
||||
|
||||
@@ -36,10 +36,6 @@
|
||||
"include_associated": "Use Wi-Fi AP associations data (ignored if hotspot data used)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"not_initialized": "The integration is not initialized yet. Can't display available options."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["xknx", "xknxproject"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": [
|
||||
"xknx==3.8.0",
|
||||
"xknxproject==3.8.2",
|
||||
|
||||
@@ -13,7 +13,7 @@ rules:
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name:
|
||||
@@ -41,8 +41,8 @@ rules:
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
@@ -64,24 +64,21 @@ rules:
|
||||
comment: |
|
||||
YAML entities don't support devices. UI entities support user-defined devices.
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
KNX doesn't support any provided discovery method.
|
||||
discovery-update-info: todo
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: |
|
||||
KNX doesn't support any provided discovery method.
|
||||
docs-data-update: done
|
||||
docs-data-update: todo
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Devices aren't supported directly since communication is on group address level.
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
|
||||
@@ -1,13 +1,14 @@
"""Support for LaMetric time."""

from homeassistant.components import notify as hass_notify
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, PLATFORMS
from .coordinator import LaMetricConfigEntry, LaMetricDataUpdateCoordinator
from .coordinator import LaMetricDataUpdateCoordinator
from .services import async_setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -16,16 +17,16 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the LaMetric integration."""
async_setup_services(hass)

hass.data[DOMAIN] = {"hass_config": config}
return True


async def async_setup_entry(hass: HomeAssistant, entry: LaMetricConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up LaMetric from a config entry."""
coordinator = LaMetricDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()

entry.runtime_data = coordinator
hass.data[DOMAIN][entry.entry_id] = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

# Set up notify platform, no entry support for notify component yet,
@@ -36,14 +37,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMetricConfigEntry) ->
Platform.NOTIFY,
DOMAIN,
{CONF_NAME: coordinator.data.name, "entry_id": entry.entry_id},
{},
hass.data[DOMAIN]["hass_config"],
)
)
return True


async def async_unload_entry(hass: HomeAssistant, entry: LaMetricConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload LaMetric config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
del hass.data[DOMAIN][entry.entry_id]
await hass_notify.async_reload(hass, DOMAIN)
return unload_ok

Some files were not shown because too many files have changed in this diff.