Mirror of https://github.com/home-assistant/core.git (synced 2025-10-07 18:59:32 +00:00)

Compare commits: dev...improve-zh (3 commits)

Commits (SHA1):
- c1b799856a
- 2f4e3b98f3
- f2c354eb3d
.github/workflows/ci.yaml (vendored, 710 changes)
File diff suppressed because it is too large.
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:python"
.github/workflows/stale.yml (vendored, 6 changes)

@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60

@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90

@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
@@ -326,7 +326,6 @@ homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.lovelace.*
homeassistant.components.luftdaten.*
homeassistant.components.lunatone.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mastodon.*

@@ -555,7 +554,6 @@ homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vivotek.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.volvo.*
CODEOWNERS (generated, 6 changes)

@@ -910,8 +910,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/luci/ @mzdrale
/homeassistant/components/luftdaten/ @fabaff @frenck
/tests/components/luftdaten/ @fabaff @frenck
/homeassistant/components/lunatone/ @MoonDevLT
/tests/components/lunatone/ @MoonDevLT
/homeassistant/components/lupusec/ @majuss @suaveolent
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce

@@ -1065,8 +1063,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/nilu/ @hfurubotten
/homeassistant/components/nina/ @DeerMaximum
/tests/components/nina/ @DeerMaximum
/homeassistant/components/nintendo_parental/ @pantherale0
/tests/components/nintendo_parental/ @pantherale0
/homeassistant/components/nissan_leaf/ @filcole
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe

@@ -1198,6 +1194,8 @@ build.json @home-assistant/supervisor
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin
build.yaml (10 changes)

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -616,34 +616,34 @@ async def async_enable_logging(
),
)

logger = logging.getLogger()
logger.setLevel(logging.INFO if verbose else logging.WARNING)

# Log errors to a file if we have write access to file or config dir
if log_file is None:
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
if "SUPERVISOR" in os.environ:
_LOGGER.info("Running in Supervisor, not logging to file")
# Rename the default log file if it exists, since previous versions created
# it even on Supervisor
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
err_log_path = None
else:
err_log_path = default_log_path
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
else:
err_log_path = os.path.abspath(log_file)

if err_log_path:
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)

# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
)

err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

logger = logging.getLogger()
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)

# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
else:
_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

async_activate_log_queue_handler(hass)
homeassistant/brands/ibm.json (new file, 5 changes)

@@ -0,0 +1,5 @@
{
"domain": "ibm",
"name": "IBM",
"integrations": ["watson_iot", "watson_tts"]
}
@@ -12,13 +12,11 @@ from homeassistant.components.bluetooth import async_get_scanner
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import CONF_IS_NEW_STYLE_SCALE

SCAN_INTERVAL = timedelta(seconds=15)
UPDATE_DEBOUNCE_TIME = 0.2

_LOGGER = logging.getLogger(__name__)

@@ -40,19 +38,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
config_entry=entry,
)

debouncer = Debouncer(
hass=hass,
logger=_LOGGER,
cooldown=UPDATE_DEBOUNCE_TIME,
immediate=True,
function=self.async_update_listeners,
)

self._scale = AcaiaScale(
address_or_ble_device=entry.data[CONF_ADDRESS],
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=debouncer.async_schedule_call,
notify_callback=self.async_update_listeners,
scanner=async_get_scanner(hass),
)
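The hunk above is the whole difference between the two sides of this coordinator: one side hands the scale a Debouncer-wrapped callback, the other passes self.async_update_listeners directly. A minimal sketch of the debounced variant, assuming Home Assistant's helpers.debounce.Debouncer API as used in the hunk; the helper function name is illustrative:

from homeassistant.helpers.debounce import Debouncer

UPDATE_DEBOUNCE_TIME = 0.2  # seconds, same value as the hunk above

def build_notify_callback(hass, logger, coordinator):
    # Collapse bursts of BLE notifications into at most one listener update
    # per cooldown window; immediate=True still fires the first call right away.
    debouncer = Debouncer(
        hass=hass,
        logger=logger,
        cooldown=UPDATE_DEBOUNCE_TIME,
        immediate=True,
        function=coordinator.async_update_listeners,
    )
    # This is what gets passed as AcaiaScale(notify_callback=...) instead of
    # the raw coordinator.async_update_listeners reference.
    return debouncer.async_schedule_call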
@@ -1,9 +1,6 @@
{
"entity": {
"sensor": {
"air_quality": {
"default": "mdi:air-filter"
},
"cloud_ceiling": {
"default": "mdi:weather-fog"
},

@@ -37,6 +34,9 @@
"thunderstorm_probability_night": {
"default": "mdi:weather-lightning"
},
"translation_key": {
"default": "mdi:air-filter"
},
"tree_pollen": {
"default": "mdi:tree-outline"
},
@@ -1,9 +1,7 @@
"""Airgradient Update platform."""

from datetime import timedelta
import logging

from airgradient import AirGradientConnectionError
from propcache.api import cached_property

from homeassistant.components.update import UpdateDeviceClass, UpdateEntity

@@ -15,7 +13,6 @@ from .entity import AirGradientEntity

PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(hours=1)
_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(

@@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Representation of Airgradient Update."""

_attr_device_class = UpdateDeviceClass.FIRMWARE
_server_unreachable_logged = False

def __init__(self, coordinator: AirGradientCoordinator) -> None:
"""Initialize the entity."""

@@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Return the installed version of the entity."""
return self.coordinator.data.measures.firmware_version

@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._attr_available

async def async_update(self) -> None:
"""Update the entity."""
try:
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
)
except AirGradientConnectionError:
self._attr_latest_version = None
self._attr_available = False
if not self._server_unreachable_logged:
_LOGGER.error(
"Unable to connect to AirGradient server to check for updates"
)
self._server_unreachable_logged = True
else:
self._server_unreachable_logged = False
self._attr_available = True
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.5"]
"requirements": ["airos==0.5.3"]
}
@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)

URL_API_INTEGRATION = {
"url": "https://dashboard.airthings.com/integrations/api-integration"
}

class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Airthings."""

@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders=URL_API_INTEGRATION,
description_placeholders={
"url": (
"https://dashboard.airthings.com/integrations/api-integration"
),
},
)

errors = {}

@@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title="Airthings", data=user_input)

return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
description_placeholders=URL_API_INTEGRATION,
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
@@ -4,9 +4,9 @@
"user": {
"data": {
"id": "ID",
"secret": "Secret"
},
"description": "Log in at {url} to find your credentials"
"secret": "Secret",
"description": "Login at {url} to find your credentials"
}
}
},
"error": {
@@ -6,13 +6,8 @@ import dataclasses
import logging
from typing import Any

from airthings_ble import (
AirthingsBluetoothDeviceData,
AirthingsDevice,
UnsupportedDeviceError,
)
from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
from bleak import BleakError
from habluetooth import BluetoothServiceInfoBleak
import voluptuous as vol

from homeassistant.components import bluetooth

@@ -32,7 +27,6 @@ SERVICE_UUIDS = [
"b42e4a8e-ade7-11e4-89d3-123b93f75cba",
"b42e1c08-ade7-11e4-89d3-123b93f75cba",
"b42e3882-ade7-11e4-89d3-123b93f75cba",
"b42e90a2-ade7-11e4-89d3-123b93f75cba",
]

@@ -43,7 +37,6 @@ class Discovery:
name: str
discovery_info: BluetoothServiceInfo
device: AirthingsDevice
data: AirthingsBluetoothDeviceData

def get_name(device: AirthingsDevice) -> str:

@@ -51,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str:

name = device.friendly_name()
if identifier := device.identifier:
name += f" ({device.model.value}{identifier})"
name += f" ({identifier})"
return name

@@ -69,8 +62,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered_device: Discovery | None = None
self._discovered_devices: dict[str, Discovery] = {}

async def _get_device(
self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo
async def _get_device_data(
self, discovery_info: BluetoothServiceInfo
) -> AirthingsDevice:
ble_device = bluetooth.async_ble_device_from_address(
self.hass, discovery_info.address

@@ -79,8 +72,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.debug("no ble_device in _get_device_data")
raise AirthingsDeviceUpdateError("No ble_device")

airthings = AirthingsBluetoothDeviceData(_LOGGER)

try:
device = await data.update_device(ble_device)
data = await airthings.update_device(ble_device)
except BleakError as err:
_LOGGER.error(
"Error connecting to and getting data from %s: %s",

@@ -88,15 +83,12 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
err,
)
raise AirthingsDeviceUpdateError("Failed getting device data") from err
except UnsupportedDeviceError:
_LOGGER.debug("Skipping unsupported device: %s", discovery_info.name)
raise
except Exception as err:
_LOGGER.error(
"Unknown error occurred from %s: %s", discovery_info.address, err
)
raise
return device
return data

async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfo

@@ -106,21 +98,17 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(discovery_info.address)
self._abort_if_unique_id_configured()

data = AirthingsBluetoothDeviceData(logger=_LOGGER)

try:
device = await self._get_device(data=data, discovery_info=discovery_info)
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except UnsupportedDeviceError:
return self.async_abort(reason="unsupported_device")
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")

name = get_name(device)
self.context["title_placeholders"] = {"name": name}
self._discovered_device = Discovery(name, discovery_info, device, data=data)
self._discovered_device = Discovery(name, discovery_info, device)

return await self.async_step_bluetooth_confirm()

@@ -129,12 +117,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Confirm discovery."""
if user_input is not None:
if (
self._discovered_device is not None
and self._discovered_device.device.firmware.need_firmware_upgrade
):
return self.async_abort(reason="firmware_upgrade_required")

return self.async_create_entry(
title=self.context["title_placeholders"]["name"], data={}
)

@@ -155,9 +137,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured()
discovery = self._discovered_devices[address]

if discovery.device.firmware.need_firmware_upgrade:
return self.async_abort(reason="firmware_upgrade_required")

self.context["title_placeholders"] = {
"name": discovery.name,
}

@@ -167,53 +146,32 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title=discovery.name, data={})

current_addresses = self._async_current_ids(include_ignore=False)
devices: list[BluetoothServiceInfoBleak] = []
for discovery_info in async_discovered_service_info(self.hass):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:
continue

if MFCT_ID not in discovery_info.manufacturer_data:
continue
if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
_LOGGER.debug(
"Skipping unsupported device: %s (%s)", discovery_info.name, address
)
continue
devices.append(discovery_info)

for discovery_info in devices:
address = discovery_info.address
data = AirthingsBluetoothDeviceData(logger=_LOGGER)
if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
continue

try:
device = await self._get_device(data, discovery_info)
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
_LOGGER.error(
"Error connecting to and getting data from %s (%s)",
discovery_info.name,
discovery_info.address,
)
continue
except UnsupportedDeviceError:
_LOGGER.debug(
"Skipping unsupported device: %s (%s)",
discovery_info.name,
discovery_info.address,
)
continue
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")
name = get_name(device)
_LOGGER.debug("Discovered Airthings device: %s (%s)", name, address)
self._discovered_devices[address] = Discovery(
name, discovery_info, device, data
)
self._discovered_devices[address] = Discovery(name, discovery_info, device)

if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")

titles = {
address: get_name(discovery.device)
address: discovery.device.name
for (address, discovery) in self._discovered_devices.items()
}
return self.async_show_form(
@@ -17,10 +17,6 @@
{
"manufacturer_id": 820,
"service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba"
},
{
"manufacturer_id": 820,
"service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba"
}
],
"codeowners": ["@vincegio", "@LaStrada"],

@@ -28,5 +24,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"iot_class": "local_polling",
"requirements": ["airthings-ble==1.1.1"]
"requirements": ["airthings-ble==0.9.2"]
}
|
@@ -16,12 +16,10 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
LIGHT_LUX,
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
Platform,
|
||||
UnitOfPressure,
|
||||
UnitOfSoundPressure,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -114,25 +112,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"lux": SensorEntityDescription(
|
||||
key="lux",
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"noise": SensorEntityDescription(
|
||||
key="noise",
|
||||
translation_key="ambient_noise",
|
||||
device_class=SensorDeviceClass.SOUND_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
}
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@callback
|
||||
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:
|
||||
|
@@ -6,9 +6,6 @@
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"address": "The Airthings devices discovered via Bluetooth."
}
},
"bluetooth_confirm": {

@@ -20,8 +17,6 @@
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.",
"unsupported_device": "Unsupported device",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},

@@ -41,9 +36,6 @@
},
"illuminance": {
"name": "[%key:component::sensor::entity_component::illuminance::name%]"
},
"ambient_noise": {
"name": "Ambient noise"
}
}
}
@@ -2,14 +2,17 @@

from airtouch4pyapi import AirTouch

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
from .coordinator import AirtouchDataUpdateCoordinator

PLATFORMS = [Platform.CLIMATE]

type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]

async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
"""Set up AirTouch4 from a config entry."""

@@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
info = airtouch.GetAcs()
if not info:
raise ConfigEntryNotReady
coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
|
@@ -2,34 +2,26 @@
|
||||
|
||||
import logging
|
||||
|
||||
from airtouch4pyapi import AirTouch
|
||||
from airtouch4pyapi.airtouch import AirTouchStatus
|
||||
|
||||
from homeassistant.components.climate import SCAN_INTERVAL
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to manage fetching Airtouch data."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
|
||||
) -> None:
|
||||
def __init__(self, hass, airtouch):
|
||||
"""Initialize global Airtouch data updater."""
|
||||
self.airtouch = airtouch
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
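The two __init__ signatures above differ in whether the config entry is threaded through to DataUpdateCoordinator. A short sketch of the typed variant, reassembled from the hunk (all names come from the hunk itself; this is not a complete module):

class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
    """Class to manage fetching Airtouch data."""

    def __init__(
        self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
    ) -> None:
        """Initialize global Airtouch data updater."""
        self.airtouch = airtouch
        # config_entry=entry ties the coordinator to its config entry, so the
        # base class can associate refreshes and logging with that entry
        # instead of leaving the association implicit.
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )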
@@ -18,9 +18,7 @@ from homeassistant.components.binary_sensor import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er

from .const import _LOGGER, DOMAIN
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id

@@ -53,47 +51,11 @@ BINARY_SENSORS: Final = (
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
device.online and device.sensors[key].error is False
),
),
)

DEPRECATED_BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: False,
),
)

async def async_setup_entry(
hass: HomeAssistant,

@@ -104,8 +66,6 @@ async def async_setup_entry(

coordinator = entry.runtime_data

entity_registry = er.async_get(hass)

# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,

@@ -115,16 +75,6 @@ async def async_setup_entry(
"detectionState",
)

# Clean up deprecated sensors
for sensor_desc in DEPRECATED_BINARY_SENSORS:
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{sensor_desc.key}"
if entity_id := entity_registry.async_get_entity_id(
BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
):
_LOGGER.debug("Removing deprecated entity %s", entity_id)
entity_registry.async_remove(entity_id)

known_devices: set[str] = set()

def _check_device() -> None:
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.9"]
"requirements": ["aioamazondevices==6.2.7"]
}
|
@@ -32,9 +32,7 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
|
||||
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
|
||||
device.online
|
||||
and (sensor := device.sensors.get(key)) is not None
|
||||
and sensor.error is False
|
||||
device.online and device.sensors[key].error is False
|
||||
)
|
||||
|
||||
|
||||
@@ -42,9 +40,9 @@ SENSORS: Final = (
|
||||
AmazonSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement_fn=lambda device, key: (
|
||||
native_unit_of_measurement_fn=lambda device, _key: (
|
||||
UnitOfTemperature.CELSIUS
|
||||
if key in device.sensors and device.sensors[key].scale == "CELSIUS"
|
||||
if device.sensors[_key].scale == "CELSIUS"
|
||||
else UnitOfTemperature.FAHRENHEIT
|
||||
),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
|
@@ -18,11 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import (
alexa_api_call,
async_remove_dnd_from_virtual_group,
async_update_unique_id,
)
from .utils import alexa_api_call, async_update_unique_id

PARALLEL_UPDATES = 1

@@ -33,9 +29,7 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):

is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
device.online and device.sensors[key].error is False
)
method: str

@@ -64,9 +58,6 @@ async def async_setup_entry(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)

# Remove DND switch from virtual groups
await async_remove_dnd_from_virtual_group(hass, coordinator)

known_devices: set[str] = set()

def _check_device() -> None:
@@ -4,10 +4,8 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er

@@ -63,21 +61,3 @@ async def async_update_unique_id(

# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)

async def async_remove_dnd_from_virtual_group(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
) -> None:
"""Remove entity DND from virtual group."""
entity_registry = er.async_get(hass)

for serial_num in coordinator.data:
unique_id = f"{serial_num}-do_not_disturb"
entity_id = entity_registry.async_get_entity_id(
DOMAIN, SWITCH_DOMAIN, unique_id
)
is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
if entity_id and is_group:
entity_registry.async_remove(entity_id)
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
@@ -65,31 +65,6 @@ SENSOR_DESCRIPTIONS = [
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="BME680_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BME680_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BME680_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BMP_pressure",
@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()

async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)

@@ -513,8 +513,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
integration_configs: dict[str, AnalyticsModifications] = {}

removed_devices: set[str] = set()

# Get device list
for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry:

@@ -527,10 +525,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
if config_entry is None:
continue

if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
removed_devices.add(device_entry.id)
continue

integration_domain = config_entry.domain

integration_input = integration_inputs.setdefault(integration_domain, ([], []))

@@ -620,15 +614,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
device_config = integration_config.devices.get(device_id, device_config)

if device_config.remove:
removed_devices.add(device_id)
continue

device_entry = dev_reg.devices[device_id]

device_id_mapping[device_id] = (integration_domain, len(devices_info))
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))

devices_info.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,

@@ -637,7 +631,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
"entities": [],
}
)

@@ -676,7 +669,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901

entity_entry = ent_reg.entities[entity_id]

entity_state = hass.states.get(entity_id)
entity_state = hass.states.get(entity_entry.entity_id)

entity_info = {
# LIMITATION: `assumed_state` can be overridden by users;

@@ -697,18 +690,14 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
"unit_of_measurement": entity_entry.unit_of_measurement,
}

if (device_id_ := entity_entry.device_id) is not None:
if device_id_ in removed_devices:
# The device was removed, so we remove the entity too
continue

if (
new_device_id := device_id_mapping.get(device_id_)
) is not None and (new_device_id[0] == integration_domain):
((device_id_ := entity_entry.device_id) is not None)
and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
continue

else:
entities_info.append(entity_info)

return {
@@ -19,8 +19,9 @@ CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

NON_THINKING_MODELS = [
"claude-3-5",  # Both sonnet and haiku
"claude-3-opus",
"claude-3-haiku",
THINKING_MODELS = [
"claude-3-7-sonnet",
"claude-sonnet-4-0",
"claude-opus-4-0",
"claude-opus-4-1",
]
@@ -51,11 +51,11 @@ from .const import (
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
)

# Max number of back and forth with the LLM to generate a response

@@ -364,7 +364,7 @@ class AnthropicBaseLLMEntity(Entity):
if tools:
model_args["tools"] = tools
if (
not model.startswith(tuple(NON_THINKING_MODELS))
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
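The entity hunk above toggles between a deny-list (NON_THINKING_MODELS) and an allow-list (THINKING_MODELS) when deciding whether to send a thinking budget. A small sketch of both gates, assuming the constants from the const.py hunk; the hunk cuts off after the ThinkingConfigEnabledParam opening parenthesis, so the keyword arguments shown here are an assumption:

# Deny-list variant: enable extended thinking unless the model is known not to support it.
if (
    not model.startswith(tuple(NON_THINKING_MODELS))
    and thinking_budget >= MIN_THINKING_BUDGET
):
    model_args["thinking"] = ThinkingConfigEnabledParam(
        type="enabled", budget_tokens=thinking_budget  # assumed completion of the call
    )

# Allow-list variant: enable extended thinking only for models explicitly listed.
if (
    model.startswith(tuple(THINKING_MODELS))
    and thinking_budget >= MIN_THINKING_BUDGET
):
    model_args["thinking"] = ThinkingConfigEnabledParam(
        type="enabled", budget_tokens=thinking_budget  # assumed completion of the call
    )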
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.69.0"]
"requirements": ["anthropic==0.62.0"]
}
@@ -2,7 +2,9 @@

from __future__ import annotations

from typing import Any
from typing import Any, TypeVar

T = TypeVar("T", dict[str, Any], list[Any], None)

TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",

@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}

def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""

if raw is None:
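The hunk above is a syntax swap between a module-level TypeVar and an inline PEP 695 type parameter; both spell the same constrained generic. A side-by-side sketch (the two definitions would not coexist in one module, and the bodies are omitted as in the hunk):

from typing import Any, TypeVar

# Older spelling: a module-level constrained TypeVar shared by annotations.
T = TypeVar("T", dict[str, Any], list[Any], None)

def translate_to_legacy(raw: T) -> T:
    """Translate raw data to legacy format for dicts and lists."""
    ...

# PEP 695 spelling (Python 3.12+): the constraint is declared on the function itself,
# so no module-level TypeVar is needed.
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    """Translate raw data to legacy format for dicts and lists."""
    ...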
@@ -26,6 +26,9 @@ async def async_setup_entry(

if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)

@@ -33,11 +36,6 @@ async def async_setup_entry(

config_entry.runtime_data = coordinator

if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)

await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

return True
@@ -272,13 +272,6 @@ async def async_setup_entry(
observations: list[ConfigType] = [
dict(subentry.data) for subentry in config_entry.subentries.values()
]

for observation in observations:
if observation[CONF_PLATFORM] == CONF_TEMPLATE:
observation[CONF_VALUE_TEMPLATE] = Template(
observation[CONF_VALUE_TEMPLATE], hass
)

prior: float = config[CONF_PRIOR]
probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
@@ -19,8 +19,8 @@
"bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.3",
"dbus-fast==2.44.5",
"habluetooth==5.7.0"
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.3",
"habluetooth==5.6.4"
]
}
@@ -51,6 +51,12 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval

@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"

# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")

class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity."""

@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK],
)

# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
@@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitVedoSystem
from .utils import DeviceType, new_device_listener

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

@@ -30,19 +29,23 @@ async def async_setup_entry(

coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data["alarm_zones"].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoBinarySensorEntity(
coordinator, device, config_entry.entry_id
)
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitVedoBinarySensorEntity(
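Both sides of the comelit hunks do the same job: add entities only for devices that were not seen before. One side inlines a _check_device closure, the other moves it behind a new_device_listener helper in .utils, which this compare does not show. A rough sketch of what such a helper could look like, purely as an assumption inferred from how it is called above (new_device_listener(coordinator, callback, data_key) returning an unsubscribe callable):

from collections.abc import Callable

def new_device_listener(
    coordinator,
    new_devices_callback: Callable[[list, str], None],
    data_key: str,
) -> Callable[[], None]:
    """Invoke the callback with devices newly seen under coordinator.data[data_key]."""
    known_indexes: set[int] = set()  # hypothetical state, mirroring the inline variant

    def _check_device() -> None:
        current = set(coordinator.data[data_key])
        new_indexes = current - known_indexes
        if new_indexes:
            known_indexes.update(new_indexes)
            new_devices_callback(
                [
                    device
                    for device in coordinator.data[data_key].values()
                    if device.index in new_indexes
                ],
                data_key,
            )

    _check_device()
    # The returned unsubscribe callable is what config_entry.async_on_unload() receives.
    return coordinator.async_add_listener(_check_device)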
@@ -4,7 +4,6 @@ from __future__ import annotations

from asyncio.exceptions import TimeoutError
from collections.abc import Mapping
import re
from typing import Any

from aiocomelit import (

@@ -28,20 +27,25 @@ from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111"

pin_regex = r"^[0-9]{4,10}$"

USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
}
)

@@ -51,9 +55,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,

api: ComelitCommonApi

if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
raise InvalidPin

session = await async_client_session(hass)
if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
api = ComeliteSerialBridgeApi(

@@ -104,8 +105,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"

@@ -147,8 +146,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"

@@ -192,8 +189,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception:  # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"

@@ -215,7 +210,3 @@ class CannotConnect(HomeAssistantError):

class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""

class InvalidPin(HomeAssistantError):
"""Error to indicate an invalid pin."""
@@ -161,7 +161,7 @@ class ComelitSerialBridge(
entry: ComelitConfigEntry,
host: str,
port: int,
pin: str,
pin: int,
session: ClientSession,
) -> None:
"""Initialize the scanner."""

@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
entry: ComelitConfigEntry,
host: str,
port: int,
pin: str,
pin: int,
session: ClientSession,
) -> None:
"""Initialize the scanner."""
@@ -7,21 +7,14 @@ from typing import Any, cast
from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON

from homeassistant.components.cover import (
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
CoverDeviceClass,
CoverEntity,
)
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

@@ -36,20 +29,22 @@ async def async_setup_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, COVER)
def _check_device() -> None:
current_devices = set(coordinator.data[COVER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
"""Cover device."""

@@ -67,6 +62,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
super().__init__(coordinator, device, config_entry_entry_id)
# Device doesn't provide a status so we assume UNKNOWN at first startup
self._last_action: int | None = None
self._last_state: str | None = None

def _current_action(self, action: str) -> bool:
"""Return the current cover action."""

@@ -102,6 +98,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@bridge_api_call
async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state."""
self._last_state = self.state
await self.coordinator.api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state()

@@ -127,10 +124,5 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):

await super().async_added_to_hass()

if (state := await self.async_get_last_state()) is not None:
if state.state == STATE_CLOSED:
self._last_action = STATE_COVER.index(STATE_CLOSING)
if state.state == STATE_OPEN:
self._last_action = STATE_COVER.index(STATE_OPENING)

self._attr_is_closed = state.state == STATE_CLOSED
if last_state := await self.async_get_last_state():
self._last_state = last_state.state
@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

@@ -27,20 +27,22 @@ async def async_setup_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, LIGHT)
def _check_device() -> None:
current_devices = set(coordinator.data[LIGHT])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
"""Light device."""
@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"requirements": ["aiocomelit==1.1.1"]
"requirements": ["aiocomelit==0.12.3"]
}
|
@@ -20,7 +20,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import DeviceType, new_device_listener
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -66,23 +65,25 @@ async def async_setup_bridge_entry(
|
||||
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
current_devices = set(coordinator.data[OTHER])
|
||||
new_devices = current_devices - known_devices
|
||||
if new_devices:
|
||||
known_devices.update(new_devices)
|
||||
async_add_entities(
|
||||
ComelitBridgeSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
)
|
||||
for sensor_desc in SENSOR_BRIDGE_TYPES
|
||||
for device in coordinator.data[dev_type].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, OTHER)
|
||||
for device in coordinator.data[OTHER].values()
|
||||
if device.index in new_devices
|
||||
)
|
||||
|
||||
_check_device()
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
||||
|
||||
async def async_setup_vedo_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -93,23 +94,25 @@ async def async_setup_vedo_entry(
|
||||
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
current_devices = set(coordinator.data["alarm_zones"])
|
||||
new_devices = current_devices - known_devices
|
||||
if new_devices:
|
||||
known_devices.update(new_devices)
|
||||
async_add_entities(
|
||||
ComelitVedoSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
)
|
||||
for sensor_desc in SENSOR_VEDO_TYPES
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
if device.index in new_devices
|
||||
)
|
||||
|
||||
_check_device()
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
||||
|
||||
class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):
|
||||
"""Sensor device."""
|
||||
|
@@ -43,13 +43,11 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
@@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -28,21 +28,36 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
entities: list[ComelitSwitchEntity] = []
|
||||
entities.extend(
|
||||
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[dev_type].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
for device in coordinator.data[IRRIGATION].values()
|
||||
)
|
||||
entities.extend(
|
||||
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[OTHER].values()
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
known_devices: dict[str, set[int]] = {
|
||||
dev_type: set() for dev_type in (IRRIGATION, OTHER)
|
||||
}
|
||||
|
||||
def _check_device() -> None:
|
||||
for dev_type in (IRRIGATION, OTHER):
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, dev_type)
|
||||
current_devices = set(coordinator.data[dev_type])
|
||||
new_devices = current_devices - known_devices[dev_type]
|
||||
if new_devices:
|
||||
known_devices[dev_type].update(new_devices)
|
||||
async_add_entities(
|
||||
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[dev_type].values()
|
||||
if device.index in new_devices
|
||||
)
|
||||
|
||||
_check_device()
|
||||
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
||||
|
||||
class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
|
||||
"""Switch device."""
|
||||
|
@@ -4,11 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate
 
-from aiocomelit.api import (
-    ComelitSerialBridgeObject,
-    ComelitVedoAreaObject,
-    ComelitVedoZoneObject,
-)
+from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
 from aiohttp import ClientSession, CookieJar
 
@@ -23,11 +19,8 @@ from homeassistant.helpers import (
 )
 
 from .const import _LOGGER, DOMAIN
-from .coordinator import ComelitBaseCoordinator
 from .entity import ComelitBridgeBaseEntity
 
-DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
-
 
 async def async_client_session(hass: HomeAssistant) -> ClientSession:
     """Return a new aiohttp session."""
@@ -120,41 +113,3 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
|
||||
self.coordinator.config_entry.async_start_reauth(self.hass)
|
||||
|
||||
return cmd_wrapper
|
||||
|
||||
|
||||
def new_device_listener(
|
||||
coordinator: ComelitBaseCoordinator,
|
||||
new_devices_callback: Callable[
|
||||
[
|
||||
list[
|
||||
ComelitSerialBridgeObject
|
||||
| ComelitVedoAreaObject
|
||||
| ComelitVedoZoneObject
|
||||
],
|
||||
str,
|
||||
],
|
||||
None,
|
||||
],
|
||||
data_type: str,
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to coordinator updates to check for new devices."""
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_devices() -> None:
|
||||
"""Check for new devices and call callback with any new monitors."""
|
||||
if not coordinator.data:
|
||||
return
|
||||
|
||||
new_devices: list[DeviceType] = []
|
||||
for _id in coordinator.data[data_type]:
|
||||
if _id not in known_devices:
|
||||
known_devices.add(_id)
|
||||
new_devices.append(coordinator.data[data_type][_id])
|
||||
|
||||
if new_devices:
|
||||
new_devices_callback(new_devices, data_type)
|
||||
|
||||
# Check for devices immediately
|
||||
_check_devices()
|
||||
|
||||
return coordinator.async_add_listener(_check_devices)
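Relative to the branch, this hunk shows dev's new_device_listener helper, which folds the same per-platform bookkeeping into one callback-based subscription. A rough, self-contained sketch of that idea follows; StubCoordinator and the "light" key are made up for illustration and are not the aiocomelit or Home Assistant API.

# Callback-based variant: report devices not seen on earlier refreshes.
from collections.abc import Callable


class StubCoordinator:
    """Stand-in coordinator with {data_type: {index: device}} data."""

    def __init__(self) -> None:
        self.data: dict[str, dict[int, str]] = {"light": {}}
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, cb: Callable[[], None]) -> Callable[[], None]:
        self._listeners.append(cb)
        return lambda: self._listeners.remove(cb)  # unsubscribe callable

    def notify(self) -> None:
        for cb in list(self._listeners):
            cb()


def new_device_listener(coordinator, callback, data_type: str) -> Callable[[], None]:
    """Invoke callback([new devices], data_type) now and after every refresh."""
    known: set[int] = set()

    def _check() -> None:
        fresh = [dev for idx, dev in coordinator.data[data_type].items() if idx not in known]
        known.update(coordinator.data[data_type])
        if fresh:
            callback(fresh, data_type)

    _check()  # report devices already present
    return coordinator.async_add_listener(_check)


coordinator = StubCoordinator()
coordinator.data["light"] = {1: "garden"}
unsub = new_device_listener(coordinator, lambda devs, kind: print(kind, devs), "light")
coordinator.data["light"][2] = "garage"
coordinator.notify()  # prints: light ['garage']
unsub()  # stop listening, e.g. when the config entry unloads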
|
||||
|
@@ -38,30 +38,22 @@ from home_assistant_intents import (
|
||||
ErrorKey,
|
||||
FuzzyConfig,
|
||||
FuzzyLanguageResponses,
|
||||
LanguageScores,
|
||||
get_fuzzy_config,
|
||||
get_fuzzy_language,
|
||||
get_intents,
|
||||
get_language_scores,
|
||||
get_languages,
|
||||
)
|
||||
import yaml
|
||||
|
||||
from homeassistant import core
|
||||
from homeassistant.components.homeassistant.exposed_entities import (
|
||||
async_listen_entity_updates,
|
||||
async_should_expose,
|
||||
)
|
||||
from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
|
||||
from homeassistant.core import (
|
||||
Event,
|
||||
EventStateChangedData,
|
||||
HomeAssistant,
|
||||
State,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.core import Event, callback
|
||||
from homeassistant.helpers import (
|
||||
area_registry as ar,
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
floor_registry as fr,
|
||||
@@ -200,7 +192,7 @@ class IntentCache:
|
||||
|
||||
|
||||
async def async_setup_default_agent(
|
||||
hass: HomeAssistant,
|
||||
hass: core.HomeAssistant,
|
||||
entity_component: EntityComponent[ConversationEntity],
|
||||
config_intents: dict[str, Any],
|
||||
) -> None:
|
||||
@@ -209,13 +201,15 @@ async def async_setup_default_agent(
|
||||
await entity_component.async_add_entities([agent])
|
||||
await get_agent_manager(hass).async_setup_default_agent(agent)
|
||||
|
||||
@callback
|
||||
def async_entity_state_listener(event: Event[EventStateChangedData]) -> None:
|
||||
@core.callback
|
||||
def async_entity_state_listener(
|
||||
event: core.Event[core.EventStateChangedData],
|
||||
) -> None:
|
||||
"""Set expose flag on new entities."""
|
||||
async_should_expose(hass, DOMAIN, event.data["entity_id"])
|
||||
|
||||
@callback
|
||||
def async_hass_started(hass: HomeAssistant) -> None:
|
||||
@core.callback
|
||||
def async_hass_started(hass: core.HomeAssistant) -> None:
|
||||
"""Set expose flag on all entities."""
|
||||
for state in hass.states.async_all():
|
||||
async_should_expose(hass, DOMAIN, state.entity_id)
|
||||
@@ -230,7 +224,9 @@ class DefaultAgent(ConversationEntity):
|
||||
_attr_name = "Home Assistant"
|
||||
_attr_supported_features = ConversationEntityFeature.CONTROL
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
|
||||
def __init__(
|
||||
self, hass: core.HomeAssistant, config_intents: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize the default agent."""
|
||||
self.hass = hass
|
||||
self._lang_intents: dict[str, LanguageIntents | object] = {}
|
||||
@@ -263,7 +259,7 @@ class DefaultAgent(ConversationEntity):
|
||||
"""Return a list of supported languages."""
|
||||
return get_languages()
|
||||
|
||||
@callback
|
||||
@core.callback
|
||||
def _filter_entity_registry_changes(
|
||||
self, event_data: er.EventEntityRegistryUpdatedData
|
||||
) -> bool:
|
||||
@@ -272,12 +268,12 @@ class DefaultAgent(ConversationEntity):
|
||||
field in event_data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
|
||||
)
|
||||
|
||||
@callback
|
||||
def _filter_state_changes(self, event_data: EventStateChangedData) -> bool:
|
||||
@core.callback
|
||||
def _filter_state_changes(self, event_data: core.EventStateChangedData) -> bool:
|
||||
"""Filter state changed events."""
|
||||
return not event_data["old_state"] or not event_data["new_state"]
|
||||
|
||||
@callback
|
||||
@core.callback
|
||||
def _listen_clear_slot_list(self) -> None:
|
||||
"""Listen for changes that can invalidate slot list."""
|
||||
assert self._unsub_clear_slot_list is None
|
||||
@@ -346,81 +342,6 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
return result
|
||||
|
||||
async def async_debug_recognize(
|
||||
self, user_input: ConversationInput
|
||||
) -> dict[str, Any] | None:
|
||||
"""Debug recognize from user input."""
|
||||
result_dict: dict[str, Any] | None = None
|
||||
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
result_dict = {
|
||||
# Matched a user-defined sentence trigger.
|
||||
# We can't provide the response here without executing the
|
||||
# trigger.
|
||||
"match": True,
|
||||
"source": "trigger",
|
||||
"sentence_template": trigger_result.sentence_template or "",
|
||||
}
|
||||
elif intent_result := await self.async_recognize_intent(user_input):
|
||||
successful_match = not intent_result.unmatched_entities
|
||||
result_dict = {
|
||||
# Name of the matching intent (or the closest)
|
||||
"intent": {
|
||||
"name": intent_result.intent.name,
|
||||
},
|
||||
# Slot values that would be received by the intent
|
||||
"slots": { # direct access to values
|
||||
entity_key: entity.text or entity.value
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Extra slot details, such as the originally matched text
|
||||
"details": {
|
||||
entity_key: {
|
||||
"name": entity.name,
|
||||
"value": entity.value,
|
||||
"text": entity.text,
|
||||
}
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Entities/areas/etc. that would be targeted
|
||||
"targets": {},
|
||||
# True if match was successful
|
||||
"match": successful_match,
|
||||
# Text of the sentence template that matched (or was closest)
|
||||
"sentence_template": "",
|
||||
# When match is incomplete, this will contain the best slot guesses
|
||||
"unmatched_slots": _get_unmatched_slots(intent_result),
|
||||
# True if match was not exact
|
||||
"fuzzy_match": False,
|
||||
}
|
||||
|
||||
if successful_match:
|
||||
result_dict["targets"] = {
|
||||
state.entity_id: {"matched": is_matched}
|
||||
for state, is_matched in _get_debug_targets(
|
||||
self.hass, intent_result
|
||||
)
|
||||
}
|
||||
|
||||
if intent_result.intent_sentence is not None:
|
||||
result_dict["sentence_template"] = intent_result.intent_sentence.text
|
||||
|
||||
if intent_result.intent_metadata:
|
||||
# Inspect metadata to determine if this matched a custom sentence
|
||||
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
|
||||
result_dict["source"] = "custom"
|
||||
result_dict["file"] = intent_result.intent_metadata.get(
|
||||
METADATA_CUSTOM_FILE
|
||||
)
|
||||
else:
|
||||
result_dict["source"] = "builtin"
|
||||
|
||||
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
|
||||
METADATA_FUZZY_MATCH, False
|
||||
)
|
||||
|
||||
return result_dict
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
@@ -969,7 +890,7 @@ class DefaultAgent(ConversationEntity):
|
||||
) -> str:
|
||||
# Get first matched or unmatched state.
|
||||
# This is available in the response template as "state".
|
||||
state1: State | None = None
|
||||
state1: core.State | None = None
|
||||
if intent_response.matched_states:
|
||||
state1 = intent_response.matched_states[0]
|
||||
elif intent_response.unmatched_states:
|
||||
@@ -1607,10 +1528,6 @@ class DefaultAgent(ConversationEntity):
|
||||
return None
|
||||
return response
|
||||
|
||||
async def async_get_language_scores(self) -> dict[str, LanguageScores]:
|
||||
"""Get support scores per language."""
|
||||
return await self.hass.async_add_executor_job(get_language_scores)
|
||||
|
||||
|
||||
def _make_error_result(
|
||||
language: str,
|
||||
@@ -1672,7 +1589,7 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str
|
||||
|
||||
|
||||
def _get_match_error_response(
|
||||
hass: HomeAssistant,
|
||||
hass: core.HomeAssistant,
|
||||
match_error: intent.MatchFailedError,
|
||||
) -> tuple[ErrorKey, dict[str, Any]]:
|
||||
"""Return key and template arguments for error when target matching fails."""
|
||||
@@ -1807,75 +1724,3 @@ def _collect_list_references(expression: Expression, list_names: set[str]) -> No
|
||||
elif isinstance(expression, ListReference):
|
||||
# {list}
|
||||
list_names.add(expression.slot_name)
|
||||
|
||||
|
||||
def _get_debug_targets(
|
||||
hass: HomeAssistant,
|
||||
result: RecognizeResult,
|
||||
) -> Iterable[tuple[State, bool]]:
|
||||
"""Yield state/is_matched pairs for a hassil recognition."""
|
||||
entities = result.entities
|
||||
|
||||
name: str | None = None
|
||||
area_name: str | None = None
|
||||
domains: set[str] | None = None
|
||||
device_classes: set[str] | None = None
|
||||
state_names: set[str] | None = None
|
||||
|
||||
if "name" in entities:
|
||||
name = str(entities["name"].value)
|
||||
|
||||
if "area" in entities:
|
||||
area_name = str(entities["area"].value)
|
||||
|
||||
if "domain" in entities:
|
||||
domains = set(cv.ensure_list(entities["domain"].value))
|
||||
|
||||
if "device_class" in entities:
|
||||
device_classes = set(cv.ensure_list(entities["device_class"].value))
|
||||
|
||||
if "state" in entities:
|
||||
# HassGetState only
|
||||
state_names = set(cv.ensure_list(entities["state"].value))
|
||||
|
||||
if (
|
||||
(name is None)
|
||||
and (area_name is None)
|
||||
and (not domains)
|
||||
and (not device_classes)
|
||||
and (not state_names)
|
||||
):
|
||||
# Avoid "matching" all entities when there is no filter
|
||||
return
|
||||
|
||||
states = intent.async_match_states(
|
||||
hass,
|
||||
name=name,
|
||||
area_name=area_name,
|
||||
domains=domains,
|
||||
device_classes=device_classes,
|
||||
)
|
||||
|
||||
for state in states:
|
||||
# For queries, a target is "matched" based on its state
|
||||
is_matched = (state_names is None) or (state.state in state_names)
|
||||
yield state, is_matched
|
||||
|
||||
|
||||
def _get_unmatched_slots(
|
||||
result: RecognizeResult,
|
||||
) -> dict[str, str | int | float]:
|
||||
"""Return a dict of unmatched text/range slot entities."""
|
||||
unmatched_slots: dict[str, str | int | float] = {}
|
||||
for entity in result.unmatched_entities_list:
|
||||
if isinstance(entity, UnmatchedTextEntity):
|
||||
if entity.text == MISSING_ENTITY:
|
||||
# Don't report <missing> since these are just missing context
|
||||
# slots.
|
||||
continue
|
||||
|
||||
unmatched_slots[entity.name] = entity.text
|
||||
elif isinstance(entity, UnmatchedRangeEntity):
|
||||
unmatched_slots[entity.name] = entity.value
|
||||
|
||||
return unmatched_slots
|
||||
|
@@ -2,16 +2,21 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
from hassil.recognize import MISSING_ENTITY, RecognizeResult
|
||||
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
|
||||
from home_assistant_intents import get_language_scores
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.helpers import config_validation as cv, intent
|
||||
from homeassistant.util import language as language_util
|
||||
|
||||
from .agent_manager import (
|
||||
@@ -21,6 +26,11 @@ from .agent_manager import (
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .default_agent import (
|
||||
METADATA_CUSTOM_FILE,
|
||||
METADATA_CUSTOM_SENTENCE,
|
||||
METADATA_FUZZY_MATCH,
|
||||
)
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput
|
||||
|
||||
@@ -196,12 +206,150 @@ async def websocket_hass_agent_debug(
|
||||
language=msg.get("language", hass.config.language),
|
||||
agent_id=agent.entity_id,
|
||||
)
|
||||
result_dict = await agent.async_debug_recognize(user_input)
|
||||
result_dict: dict[str, Any] | None = None
|
||||
|
||||
if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
|
||||
result_dict = {
|
||||
# Matched a user-defined sentence trigger.
|
||||
# We can't provide the response here without executing the
|
||||
# trigger.
|
||||
"match": True,
|
||||
"source": "trigger",
|
||||
"sentence_template": trigger_result.sentence_template or "",
|
||||
}
|
||||
elif intent_result := await agent.async_recognize_intent(user_input):
|
||||
successful_match = not intent_result.unmatched_entities
|
||||
result_dict = {
|
||||
# Name of the matching intent (or the closest)
|
||||
"intent": {
|
||||
"name": intent_result.intent.name,
|
||||
},
|
||||
# Slot values that would be received by the intent
|
||||
"slots": { # direct access to values
|
||||
entity_key: entity.text or entity.value
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Extra slot details, such as the originally matched text
|
||||
"details": {
|
||||
entity_key: {
|
||||
"name": entity.name,
|
||||
"value": entity.value,
|
||||
"text": entity.text,
|
||||
}
|
||||
for entity_key, entity in intent_result.entities.items()
|
||||
},
|
||||
# Entities/areas/etc. that would be targeted
|
||||
"targets": {},
|
||||
# True if match was successful
|
||||
"match": successful_match,
|
||||
# Text of the sentence template that matched (or was closest)
|
||||
"sentence_template": "",
|
||||
# When match is incomplete, this will contain the best slot guesses
|
||||
"unmatched_slots": _get_unmatched_slots(intent_result),
|
||||
# True if match was not exact
|
||||
"fuzzy_match": False,
|
||||
}
|
||||
|
||||
if successful_match:
|
||||
result_dict["targets"] = {
|
||||
state.entity_id: {"matched": is_matched}
|
||||
for state, is_matched in _get_debug_targets(hass, intent_result)
|
||||
}
|
||||
|
||||
if intent_result.intent_sentence is not None:
|
||||
result_dict["sentence_template"] = intent_result.intent_sentence.text
|
||||
|
||||
if intent_result.intent_metadata:
|
||||
# Inspect metadata to determine if this matched a custom sentence
|
||||
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
|
||||
result_dict["source"] = "custom"
|
||||
result_dict["file"] = intent_result.intent_metadata.get(
|
||||
METADATA_CUSTOM_FILE
|
||||
)
|
||||
else:
|
||||
result_dict["source"] = "builtin"
|
||||
|
||||
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
|
||||
METADATA_FUZZY_MATCH, False
|
||||
)
|
||||
|
||||
result_dicts.append(result_dict)
|
||||
|
||||
connection.send_result(msg["id"], {"results": result_dicts})
|
||||
|
||||
|
||||
def _get_debug_targets(
|
||||
hass: HomeAssistant,
|
||||
result: RecognizeResult,
|
||||
) -> Iterable[tuple[State, bool]]:
|
||||
"""Yield state/is_matched pairs for a hassil recognition."""
|
||||
entities = result.entities
|
||||
|
||||
name: str | None = None
|
||||
area_name: str | None = None
|
||||
domains: set[str] | None = None
|
||||
device_classes: set[str] | None = None
|
||||
state_names: set[str] | None = None
|
||||
|
||||
if "name" in entities:
|
||||
name = str(entities["name"].value)
|
||||
|
||||
if "area" in entities:
|
||||
area_name = str(entities["area"].value)
|
||||
|
||||
if "domain" in entities:
|
||||
domains = set(cv.ensure_list(entities["domain"].value))
|
||||
|
||||
if "device_class" in entities:
|
||||
device_classes = set(cv.ensure_list(entities["device_class"].value))
|
||||
|
||||
if "state" in entities:
|
||||
# HassGetState only
|
||||
state_names = set(cv.ensure_list(entities["state"].value))
|
||||
|
||||
if (
|
||||
(name is None)
|
||||
and (area_name is None)
|
||||
and (not domains)
|
||||
and (not device_classes)
|
||||
and (not state_names)
|
||||
):
|
||||
# Avoid "matching" all entities when there is no filter
|
||||
return
|
||||
|
||||
states = intent.async_match_states(
|
||||
hass,
|
||||
name=name,
|
||||
area_name=area_name,
|
||||
domains=domains,
|
||||
device_classes=device_classes,
|
||||
)
|
||||
|
||||
for state in states:
|
||||
# For queries, a target is "matched" based on its state
|
||||
is_matched = (state_names is None) or (state.state in state_names)
|
||||
yield state, is_matched
|
||||
|
||||
|
||||
def _get_unmatched_slots(
|
||||
result: RecognizeResult,
|
||||
) -> dict[str, str | int | float]:
|
||||
"""Return a dict of unmatched text/range slot entities."""
|
||||
unmatched_slots: dict[str, str | int | float] = {}
|
||||
for entity in result.unmatched_entities_list:
|
||||
if isinstance(entity, UnmatchedTextEntity):
|
||||
if entity.text == MISSING_ENTITY:
|
||||
# Don't report <missing> since these are just missing context
|
||||
# slots.
|
||||
continue
|
||||
|
||||
unmatched_slots[entity.name] = entity.text
|
||||
elif isinstance(entity, UnmatchedRangeEntity):
|
||||
unmatched_slots[entity.name] = entity.value
|
||||
|
||||
return unmatched_slots
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/agent/homeassistant/language_scores",
|
||||
@@ -216,13 +364,10 @@ async def websocket_hass_agent_language_scores(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get support scores per language."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
|
||||
language = msg.get("language", hass.config.language)
|
||||
country = msg.get("country", hass.config.country)
|
||||
|
||||
scores = await agent.async_get_language_scores()
|
||||
scores = await hass.async_add_executor_job(get_language_scores)
|
||||
matching_langs = language_util.matches(language, scores.keys(), country=country)
|
||||
preferred_lang = matching_langs[0] if matching_langs else language
|
||||
result = {
|
||||
|
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
+  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
 }
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "quality_scale": "bronze",
-  "requirements": ["pycync==0.4.1"]
+  "requirements": ["pycync==0.4.0"]
 }
@@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
 from homeassistant.util.ssl import client_context_no_verify
 
-from .const import KEY_MAC, TIMEOUT_SEC
+from .const import KEY_MAC, TIMEOUT
 from .coordinator import DaikinConfigEntry, DaikinCoordinator
 
 _LOGGER = logging.getLogger(__name__)
@@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
     session = async_get_clientsession(hass)
     host = conf[CONF_HOST]
     try:
-        async with asyncio.timeout(TIMEOUT_SEC):
+        async with asyncio.timeout(TIMEOUT):
             device: Appliance = await DaikinFactory(
                 host,
                 session,
@@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
         )
         _LOGGER.debug("Connection to %s successful", host)
     except TimeoutError as err:
-        _LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT_SEC)
+        _LOGGER.debug("Connection to %s timed out in 60 seconds", host)
         raise ConfigEntryNotReady from err
     except ClientConnectionError as err:
         _LOGGER.debug("ClientConnectionError to %s", host)
@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 from homeassistant.util.ssl import client_context_no_verify
 
-from .const import DOMAIN, KEY_MAC, TIMEOUT_SEC
+from .const import DOMAIN, KEY_MAC, TIMEOUT
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -84,7 +84,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
             password = None
 
         try:
-            async with asyncio.timeout(TIMEOUT_SEC):
+            async with asyncio.timeout(TIMEOUT):
                 device: Appliance = await DaikinFactory(
                     host,
                     async_get_clientsession(self.hass),
@@ -24,4 +24,4 @@ ATTR_STATE_OFF = "off"
 KEY_MAC = "mac"
 KEY_IP = "ip"
 
-TIMEOUT_SEC = 120
+TIMEOUT = 60
@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 
-from .const import DOMAIN, TIMEOUT_SEC
+from .const import DOMAIN
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -28,7 +28,7 @@ class DaikinCoordinator(DataUpdateCoordinator[None]):
             _LOGGER,
             config_entry=entry,
             name=device.values.get("name", DOMAIN),
-            update_interval=timedelta(seconds=TIMEOUT_SEC),
+            update_interval=timedelta(seconds=60),
         )
         self.device = device
 
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/daikin",
   "iot_class": "local_polling",
   "loggers": ["pydaikin"],
-  "requirements": ["pydaikin==2.17.1"],
+  "requirements": ["pydaikin==2.16.0"],
   "zeroconf": ["_dkapi._tcp.local."]
 }
@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
         self._attr_translation_key = "button"
         self._attr_translation_placeholders = {"key": str(key)}
 
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the binary sensor state."""
         if (
             message[0] == self._remote_control_property.element_uid
@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
         )
 
         self.subscriber: Subscriber | None = None
+        self.sync_callback = self._sync
 
         self._value: float
 
@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
             self._device_instance.uid, self.subscriber
         )
 
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the state."""
        if message[0] == self._attr_unique_id:
            self._value = message[1]
@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
         """
         return f"{self._attr_unique_id}_{self._sensor_type}"
 
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the consumption sensor state."""
         if message[0] == self._attr_unique_id:
             self._value = getattr(
@@ -13,3 +13,8 @@ class Subscriber:
         """Initiate the subscriber."""
         self.name = name
         self.callback = callback
+
+    def update(self, message: str) -> None:
+        """Trigger hass to update the device."""
+        _LOGGER.debug('%s got message "%s"', self.name, message)
+        self.callback(message)
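The update method added here is the glue between the devolo publisher and the renamed _sync entity callbacks. A self-contained sketch of that wiring follows; the Publisher class is a made-up stand-in, not the real devolo_home_control pub-sub API.

import logging

_LOGGER = logging.getLogger(__name__)


class Subscriber:
    """Forward publisher messages to an entity callback, as in the hunk above."""

    def __init__(self, name: str, callback) -> None:
        self.name = name
        self.callback = callback

    def update(self, message) -> None:
        """Trigger the callback with the received message."""
        _LOGGER.debug('%s got message "%s"', self.name, message)
        self.callback(message)


class Publisher:
    """Stand-in publisher: keeps subscribers per device UID and dispatches to them."""

    def __init__(self) -> None:
        self._subscribers: dict[str, list[Subscriber]] = {}

    def register(self, uid: str, subscriber: Subscriber) -> None:
        self._subscribers.setdefault(uid, []).append(subscriber)

    def dispatch(self, uid: str, message) -> None:
        for subscriber in self._subscribers.get(uid, []):
            subscriber.update(message)


def _sync(message: tuple) -> None:
    # In the integration this would update entity state; here it just prints.
    print("state update:", message)


publisher = Publisher()
uid = "devolo.BinarySwitch:hdm:ZWave:1"
publisher.register(uid, Subscriber("Switch", _sync))
publisher.dispatch(uid, (uid, True))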
@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
         """Switch off the device."""
         self._binary_switch_property.set(state=False)
 
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the binary switch state and consumption."""
         if message[0].startswith("devolo.BinarySwitch"):
             self._attr_is_on = self._device_instance.binary_switch_property[
@@ -17,6 +17,6 @@
   "requirements": [
     "aiodhcpwatcher==1.2.1",
     "aiodiscover==2.7.1",
-    "cached-ipaddress==1.0.1"
+    "cached-ipaddress==0.10.0"
   ]
 }
@@ -116,9 +116,6 @@
       }
     },
     "select": {
-      "active_map": {
-        "default": "mdi:floor-plan"
-      },
       "water_amount": {
         "default": "mdi:water"
       },
@@ -2,13 +2,12 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import Any
|
||||
|
||||
from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
|
||||
from deebot_client.capabilities import CapabilitySetTypes
|
||||
from deebot_client.device import Device
|
||||
from deebot_client.events import WorkModeEvent
|
||||
from deebot_client.events.base import Event
|
||||
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
|
||||
from deebot_client.events.water_info import WaterAmountEvent
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
@@ -17,11 +16,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import EcovacsConfigEntry
|
||||
from .entity import (
|
||||
EcovacsCapabilityEntityDescription,
|
||||
EcovacsDescriptionEntity,
|
||||
EcovacsEntity,
|
||||
)
|
||||
from .entity import EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity
|
||||
from .util import get_name_key, get_supported_entities
|
||||
|
||||
|
||||
@@ -71,12 +66,6 @@ async def async_setup_entry(
|
||||
entities = get_supported_entities(
|
||||
controller, EcovacsSelectEntity, ENTITY_DESCRIPTIONS
|
||||
)
|
||||
entities.extend(
|
||||
EcovacsActiveMapSelectEntity(device, device.capabilities.map)
|
||||
for device in controller.devices
|
||||
if (map_cap := device.capabilities.map)
|
||||
and isinstance(map_cap.major, CapabilitySet)
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -114,76 +103,3 @@ class EcovacsSelectEntity[EventT: Event](
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self._device.execute_command(self._capability.set(option))
|
||||
|
||||
|
||||
class EcovacsActiveMapSelectEntity(
|
||||
EcovacsEntity[CapabilityMap],
|
||||
SelectEntity,
|
||||
):
|
||||
"""Ecovacs active map select entity."""
|
||||
|
||||
entity_description = SelectEntityDescription(
|
||||
key="active_map",
|
||||
translation_key="active_map",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: Device,
|
||||
capability: CapabilityMap,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Initialize entity."""
|
||||
super().__init__(device, capability, **kwargs)
|
||||
self._option_to_id: dict[str, str] = {}
|
||||
self._id_to_option: dict[str, str] = {}
|
||||
|
||||
self._handle_on_cached_map(
|
||||
device.events.get_last_event(CachedMapInfoEvent)
|
||||
or CachedMapInfoEvent(set())
|
||||
)
|
||||
|
||||
def _handle_on_cached_map(self, event: CachedMapInfoEvent) -> None:
|
||||
self._id_to_option.clear()
|
||||
self._option_to_id.clear()
|
||||
|
||||
for map_info in event.maps:
|
||||
name = map_info.name if map_info.name else map_info.id
|
||||
self._id_to_option[map_info.id] = name
|
||||
self._option_to_id[name] = map_info.id
|
||||
|
||||
if map_info.using:
|
||||
self._attr_current_option = name
|
||||
|
||||
if self._attr_current_option not in self._option_to_id:
|
||||
self._attr_current_option = None
|
||||
|
||||
# Sort named maps first, then numeric IDs (unnamed maps during building) in ascending order.
|
||||
self._attr_options = sorted(
|
||||
self._option_to_id.keys(), key=lambda x: (x.isdigit(), x.lower())
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Set up the event listeners now that hass is ready."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def on_cached_map(event: CachedMapInfoEvent) -> None:
|
||||
self._handle_on_cached_map(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._subscribe(self._capability.cached_info.event, on_cached_map)
|
||||
|
||||
async def on_major_map(event: MajorMapEvent) -> None:
|
||||
self._attr_current_option = self._id_to_option.get(event.map_id)
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._subscribe(self._capability.major.event, on_major_map)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(self._capability.major, CapabilitySet)
|
||||
await self._device.execute_command(
|
||||
self._capability.major.set(self._option_to_id[option])
|
||||
)
|
||||
|
@@ -178,9 +178,6 @@
       }
     },
     "select": {
-      "active_map": {
-        "name": "Active map"
-      },
       "water_amount": {
         "name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
         "state": {
@@ -7,7 +7,7 @@
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
   "quality_scale": "platinum",
-  "requirements": ["pyenphase==2.4.0"],
+  "requirements": ["pyenphase==2.3.0"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
@@ -396,7 +396,6 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription):
|
||||
int | float | str | CtType | CtMeterStatus | CtStatusFlags | CtState | None,
|
||||
]
|
||||
on_phase: str | None
|
||||
cttype: str | None = None
|
||||
|
||||
|
||||
CT_NET_CONSUMPTION_SENSORS = (
|
||||
@@ -410,7 +409,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_delivered"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="lifetime_net_production",
|
||||
@@ -422,7 +420,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_received"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption",
|
||||
@@ -434,7 +431,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("active_power"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="frequency",
|
||||
@@ -446,7 +442,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="voltage",
|
||||
@@ -459,7 +454,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_ct_current",
|
||||
@@ -472,7 +466,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_ct_powerfactor",
|
||||
@@ -483,7 +476,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption_ct_metering_status",
|
||||
@@ -494,7 +486,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption_ct_status_flags",
|
||||
@@ -504,7 +495,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -535,7 +525,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_voltage",
|
||||
@@ -548,7 +537,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_current",
|
||||
@@ -561,7 +549,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_powerfactor",
|
||||
@@ -572,7 +559,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_metering_status",
|
||||
@@ -583,7 +569,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_status_flags",
|
||||
@@ -593,7 +578,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -623,7 +607,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_delivered"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="lifetime_battery_charged",
|
||||
@@ -635,7 +618,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_received"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="battery_discharge",
|
||||
@@ -647,7 +629,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("active_power"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_frequency",
|
||||
@@ -659,7 +640,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_voltage",
|
||||
@@ -672,7 +652,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_current",
|
||||
@@ -685,7 +664,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_powerfactor",
|
||||
@@ -696,7 +674,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_metering_status",
|
||||
@@ -707,7 +684,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_status_flags",
|
||||
@@ -717,7 +693,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -1040,31 +1015,50 @@ async def async_setup_entry(
|
||||
for description in NET_CONSUMPTION_PHASE_SENSORS[use_phase]
|
||||
if phase is not None
|
||||
)
|
||||
# Add Current Transformer entities
|
||||
if envoy_data.ctmeters:
|
||||
# Add net consumption CT entities
|
||||
if ctmeter := envoy_data.ctmeter_consumption:
|
||||
entities.extend(
|
||||
EnvoyCTEntity(coordinator, description)
|
||||
for sensors in (
|
||||
CT_NET_CONSUMPTION_SENSORS,
|
||||
CT_PRODUCTION_SENSORS,
|
||||
CT_STORAGE_SENSORS,
|
||||
EnvoyConsumptionCTEntity(coordinator, description)
|
||||
for description in CT_NET_CONSUMPTION_SENSORS
|
||||
if ctmeter.measurement_type == CtType.NET_CONSUMPTION
|
||||
)
|
||||
for description in sensors
|
||||
if description.cttype in envoy_data.ctmeters
|
||||
)
|
||||
# Add Current Transformer phase entities
|
||||
if ctmeters_phases := envoy_data.ctmeters_phases:
|
||||
# For each net consumption ct phase reported add net consumption entities
|
||||
if phase_data := envoy_data.ctmeter_consumption_phases:
|
||||
entities.extend(
|
||||
EnvoyCTPhaseEntity(coordinator, description)
|
||||
for sensors in (
|
||||
CT_NET_CONSUMPTION_PHASE_SENSORS,
|
||||
CT_PRODUCTION_PHASE_SENSORS,
|
||||
CT_STORAGE_PHASE_SENSORS,
|
||||
EnvoyConsumptionCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_NET_CONSUMPTION_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.NET_CONSUMPTION
|
||||
)
|
||||
for phase, descriptions in sensors.items()
|
||||
for description in descriptions
|
||||
if (cttype := description.cttype) in ctmeters_phases
|
||||
and phase in ctmeters_phases[cttype]
|
||||
# Add production CT entities
|
||||
if ctmeter := envoy_data.ctmeter_production:
|
||||
entities.extend(
|
||||
EnvoyProductionCTEntity(coordinator, description)
|
||||
for description in CT_PRODUCTION_SENSORS
|
||||
if ctmeter.measurement_type == CtType.PRODUCTION
|
||||
)
|
||||
# For each production ct phase reported add production ct entities
|
||||
if phase_data := envoy_data.ctmeter_production_phases:
|
||||
entities.extend(
|
||||
EnvoyProductionCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_PRODUCTION_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.PRODUCTION
|
||||
)
|
||||
# Add storage CT entities
|
||||
if ctmeter := envoy_data.ctmeter_storage:
|
||||
entities.extend(
|
||||
EnvoyStorageCTEntity(coordinator, description)
|
||||
for description in CT_STORAGE_SENSORS
|
||||
if ctmeter.measurement_type == CtType.STORAGE
|
||||
)
|
||||
# For each storage ct phase reported add storage ct entities
|
||||
if phase_data := envoy_data.ctmeter_storage_phases:
|
||||
entities.extend(
|
||||
EnvoyStorageCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_STORAGE_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.STORAGE
|
||||
)
|
||||
|
||||
if envoy_data.inverters:
|
||||
@@ -1251,8 +1245,8 @@ class EnvoyNetConsumptionPhaseEntity(EnvoySystemSensorEntity):
|
||||
return self.entity_description.value_fn(system_net_consumption)
|
||||
|
||||
|
||||
class EnvoyCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy CT entity."""
|
||||
class EnvoyConsumptionCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@@ -1261,13 +1255,13 @@ class EnvoyCTEntity(EnvoySystemSensorEntity):
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (cttype := self.entity_description.cttype) not in self.data.ctmeters:
|
||||
if (ctmeter := self.data.ctmeter_consumption) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(self.data.ctmeters[cttype])
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy CT phase entity."""
|
||||
class EnvoyConsumptionCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@@ -1278,14 +1272,78 @@ class EnvoyCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (cttype := self.entity_description.cttype) not in self.data.ctmeters_phases:
|
||||
return None
|
||||
if (phase := self.entity_description.on_phase) not in self.data.ctmeters_phases[
|
||||
cttype
|
||||
]:
|
||||
if (ctmeter := self.data.ctmeter_consumption_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
self.data.ctmeters_phases[cttype][phase]
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
class EnvoyProductionCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (ctmeter := self.data.ctmeter_production) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyProductionCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (ctmeter := self.data.ctmeter_production_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
class EnvoyStorageCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net storage CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (ctmeter := self.data.ctmeter_storage) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyStorageCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net storage CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (ctmeter := self.data.ctmeter_storage_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
|
@@ -1,11 +0,0 @@
-"""Analytics platform."""
-
-from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
-from homeassistant.core import HomeAssistant
-
-
-async def async_modify_analytics(
-    hass: HomeAssistant, analytics_input: AnalyticsInput
-) -> AnalyticsModifications:
-    """Modify the analytics."""
-    return AnalyticsModifications(remove=True)
@@ -22,23 +22,19 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_ESPHOME,
|
||||
SOURCE_IGNORE,
|
||||
SOURCE_REAUTH,
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
FlowType,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow, FlowResultType
|
||||
from homeassistant.helpers import discovery_flow
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
from homeassistant.helpers.service_info.mqtt import MqttServiceInfo
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
@@ -79,7 +75,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
def __init__(self) -> None:
|
||||
"""Initialize flow."""
|
||||
self._host: str | None = None
|
||||
self._connected_address: str | None = None
|
||||
self.__name: str | None = None
|
||||
self._port: int | None = None
|
||||
self._password: str | None = None
|
||||
@@ -503,55 +498,18 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
await self.hass.config_entries.async_remove(
|
||||
self._entry_with_name_conflict.entry_id
|
||||
)
|
||||
return await self._async_create_entry()
|
||||
return self._async_create_entry()
|
||||
|
||||
async def _async_create_entry(self) -> ConfigFlowResult:
|
||||
@callback
|
||||
def _async_create_entry(self) -> ConfigFlowResult:
|
||||
"""Create the config entry."""
|
||||
assert self._name is not None
|
||||
assert self._device_info is not None
|
||||
|
||||
# Check if Z-Wave capabilities are present and start discovery flow
|
||||
next_flow_id: str | None = None
|
||||
if self._device_info.zwave_proxy_feature_flags:
|
||||
assert self._connected_address is not None
|
||||
assert self._port is not None
|
||||
|
||||
# Start Z-Wave discovery flow and get the flow ID
|
||||
zwave_result = await self.hass.config_entries.flow.async_init(
|
||||
"zwave_js",
|
||||
context={
|
||||
"source": SOURCE_ESPHOME,
|
||||
"discovery_key": discovery_flow.DiscoveryKey(
|
||||
domain=DOMAIN,
|
||||
key=self._device_info.mac_address,
|
||||
version=1,
|
||||
),
|
||||
},
|
||||
data=ESPHomeServiceInfo(
|
||||
name=self._device_info.name,
|
||||
zwave_home_id=self._device_info.zwave_home_id or None,
|
||||
ip_address=self._connected_address,
|
||||
port=self._port,
|
||||
noise_psk=self._noise_psk,
|
||||
),
|
||||
)
|
||||
if zwave_result["type"] in (
|
||||
FlowResultType.ABORT,
|
||||
FlowResultType.CREATE_ENTRY,
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Unable to continue created Z-Wave JS config flow: %s", zwave_result
|
||||
)
|
||||
else:
|
||||
next_flow_id = zwave_result["flow_id"]
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._name,
|
||||
data=self._async_make_config_data(),
|
||||
options={
|
||||
CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
|
||||
},
|
||||
next_flow=(FlowType.CONFIG_FLOW, next_flow_id) if next_flow_id else None,
|
||||
)
|
||||
|
||||
@callback
|
||||
@@ -598,7 +556,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
if entry.data.get(CONF_DEVICE_NAME) == self._device_name:
|
||||
self._entry_with_name_conflict = entry
|
||||
return await self.async_step_name_conflict()
|
||||
return await self._async_create_entry()
|
||||
return self._async_create_entry()
|
||||
|
||||
async def _async_reauth_validated_connection(self) -> ConfigFlowResult:
|
||||
"""Handle reauth validated connection."""
|
||||
@@ -745,7 +703,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
await cli.connect()
|
||||
self._device_info = await cli.device_info()
|
||||
self._connected_address = cli.connected_address
|
||||
except InvalidAuthAPIError:
|
||||
return ERROR_INVALID_PASSWORD_AUTH
|
||||
except RequiresEncryptionAPIError:
|
||||
|
@@ -17,9 +17,9 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==41.12.0",
+    "aioesphomeapi==41.11.0",
     "esphome-dashboard-api==1.3.0",
-    "bleak-esphome==3.4.0"
+    "bleak-esphome==3.3.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -85,48 +84,6 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauth when Firefly III API authentication fails."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth: ask for a new API key and validate."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
if user_input is not None:
|
||||
try:
|
||||
await _validate_input(
|
||||
self.hass,
|
||||
data={
|
||||
**reauth_entry.data,
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
},
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except FireflyClientTimeout:
|
||||
errors["base"] = "timeout_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
@@ -18,7 +18,7 @@ from pyfirefly.models import Account, Bill, Budget, Category, Currency
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_create_clientsession
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
@@ -72,7 +72,7 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
         try:
             await self.firefly.get_about()
         except FireflyAuthenticationError as err:
-            raise ConfigEntryAuthFailed(
+            raise ConfigEntryError(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
                 translation_placeholders={"error": repr(err)},
@@ -109,7 +109,7 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
             budgets = await self.firefly.get_budgets()
             bills = await self.firefly.get_bills()
         except FireflyAuthenticationError as err:
-            raise ConfigEntryAuthFailed(
+            raise UpdateFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
                 translation_placeholders={"error": repr(err)},
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/firefly_iii",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.6"]
"requirements": ["pyfirefly==0.1.5"]
}

@@ -100,6 +100,15 @@ class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
"""Return the state of the sensor."""
return self._account.attributes.current_balance

@property
def extra_state_attributes(self) -> dict[str, str] | None:
"""Return extra state attributes for the account entity."""
return {
"account_role": self._account.attributes.account_role or "",
"account_type": self._account.attributes.type or "",
"current_balance": str(self._account.attributes.current_balance or ""),
}

class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III category."""

@@ -13,15 +13,6 @@
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
},
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "The new API access token for authenticating with Firefly III"
},
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
},
"error": {
@@ -31,8 +22,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"exceptions": {

@@ -67,7 +67,7 @@ def suitable_nextchange_time(device: FritzhomeDevice) -> bool:

def suitable_temperature(device: FritzhomeDevice) -> bool:
"""Check suitablity for temperature sensor."""
return bool(device.has_temperature_sensor)
return device.has_temperature_sensor and not device.has_thermostat

def entity_category_temperature(device: FritzhomeDevice) -> EntityCategory | None:

@@ -452,10 +452,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

hass.http.app.router.register_resource(IndexView(repo_path, hass))

async_register_built_in_panel(hass, "light")
async_register_built_in_panel(hass, "security")
async_register_built_in_panel(hass, "climate")

async_register_built_in_panel(hass, "profile")

async_register_built_in_panel(

@@ -54,7 +54,7 @@ async def async_setup_entry(
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="reauth_required"
"OAuth session is not valid, reauth required"
) from err
raise ConfigEntryNotReady from err
except aiohttp.ClientError as err:
@@ -76,6 +76,10 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
) -> bool:
"""Unload a config entry."""
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)

conversation.async_unset_agent(hass, entry)

return True

@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.event import async_call_later

@@ -68,13 +68,7 @@ async def async_send_text_commands(
) -> list[CommandResponse]:
"""Send text commands to Google Assistant Service."""
# There can only be 1 entry (config_flow has single_instance_allowed)
entries = hass.config_entries.async_loaded_entries(DOMAIN)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
)
entry: GoogleAssistantSDKConfigEntry = entries[0]
entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]

session = entry.runtime_data.session
try:

@@ -1,4 +1,4 @@
"""Services for the Google Assistant SDK integration."""
"""Support for Google Assistant SDK."""

from __future__ import annotations

@@ -59,20 +59,14 @@
},
"media_player": {
"name": "Media player entity",
"description": "Name(s) of media player entities to play the Google Assistant's audio response on. This does not target the device for the command itself."
"description": "Name(s) of media player entities to play response on."
}
}
}
},
"exceptions": {
"entry_not_loaded": {
"message": "Entry not loaded"
},
"grpc_error": {
"message": "Failed to communicate with Google Assistant"
},
"reauth_required": {
"message": "Credentials are invalid, re-authentication required"
}
}
}

@@ -22,7 +22,6 @@ from homeassistant.exceptions import (
from homeassistant.helpers import config_entry_oauth2_flow

_UPLOAD_AND_DOWNLOAD_TIMEOUT = 12 * 3600
_UPLOAD_MAX_RETRIES = 20

_LOGGER = logging.getLogger(__name__)

@@ -151,7 +150,6 @@ class DriveClient:
backup_metadata,
open_stream,
backup.size,
max_retries=_UPLOAD_MAX_RETRIES,
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
)
_LOGGER.debug(

@@ -456,7 +456,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
"""Initialize the agent."""
self.entry = entry
self.subentry = subentry
self.default_model = default_model
self._attr_name = subentry.title
self._genai_client = entry.runtime_data
self._attr_unique_id = subentry.subentry_id
@@ -490,7 +489,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))

model_name = options.get(CONF_CHAT_MODEL, self.default_model)
model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemma" not in model_name
@@ -621,13 +620,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
def create_generate_content_config(self) -> GenerateContentConfig:
"""Create the GenerateContentConfig for the LLM."""
options = self.subentry.data
model = options.get(CONF_CHAT_MODEL, self.default_model)
thinking_config: ThinkingConfig | None = None
if model.startswith("models/gemini-2.5") and not model.endswith(
("tts", "image", "image-preview")
):
thinking_config = ThinkingConfig(include_thoughts=True)

return GenerateContentConfig(
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
@@ -660,7 +652,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
),
),
],
thinking_config=thinking_config,
thinking_config=ThinkingConfig(include_thoughts=True),
)

@@ -22,7 +22,6 @@ from google.protobuf import timestamp_pb2
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
@@ -92,16 +91,6 @@ def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
return timestamp

SENSOR_DESCRIPTIONS = [
SensorEntityDescription(
key="duration",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.MINUTES,
)
]

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
@@ -116,20 +105,20 @@ async def async_setup_entry(
client_options = ClientOptions(api_key=api_key)
client = RoutesAsyncClient(client_options=client_options)

sensors = [
GoogleTravelTimeSensor(
config_entry, name, api_key, origin, destination, client, sensor_description
sensor = GoogleTravelTimeSensor(
config_entry, name, api_key, origin, destination, client
)
for sensor_description in SENSOR_DESCRIPTIONS
]

async_add_entities(sensors, False)
async_add_entities([sensor], False)

class GoogleTravelTimeSensor(SensorEntity):
"""Representation of a Google travel time sensor."""

_attr_attribution = ATTRIBUTION
_attr_native_unit_of_measurement = UnitOfTime.MINUTES
_attr_device_class = SensorDeviceClass.DURATION
_attr_state_class = SensorStateClass.MEASUREMENT

def __init__(
self,
@@ -139,10 +128,8 @@ class GoogleTravelTimeSensor(SensorEntity):
origin: str,
destination: str,
client: RoutesAsyncClient,
sensor_description: SensorEntityDescription,
) -> None:
"""Initialize the sensor."""
self.entity_description = sensor_description
self._attr_name = name
self._attr_unique_id = config_entry.entry_id
self._attr_device_info = DeviceInfo(

@@ -1,18 +1,14 @@
"""The Growatt server PV inverter sensor integration."""

from collections.abc import Mapping
import logging

import growattServer

from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.exceptions import ConfigEntryError

from .const import (
AUTH_API_TOKEN,
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_PLANT_ID,
DEFAULT_URL,
@@ -23,110 +19,36 @@ from .const import (
from .coordinator import GrowattConfigEntry, GrowattCoordinator
from .models import GrowattRuntimeData

_LOGGER = logging.getLogger(__name__)

def get_device_list_classic(
def get_device_list(
api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Retrieve the device list for the selected plant."""
plant_id = config[CONF_PLANT_ID]

# Log in to api and fetch first plant if no plant id is defined.
try:
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
# DEBUG: Log the actual response structure
except Exception as ex:
_LOGGER.error("DEBUG - Login response: %s", login_response)
raise ConfigEntryError(
f"Error communicating with Growatt API during login: {ex}"
) from ex

if not login_response.get("success"):
msg = login_response.get("msg", "Unknown error")
_LOGGER.debug("Growatt login failed: %s", msg)
if msg == LOGIN_INVALID_AUTH_CODE:
raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
raise ConfigEntryError(f"Growatt login failed: {msg}")

if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
raise ConfigEntryError("Username, Password or URL may be incorrect!")
user_id = login_response["user"]["id"]

if plant_id == DEFAULT_PLANT_ID:
try:
plant_info = api.plant_list(user_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during plant list: {ex}"
) from ex
if not plant_info or "data" not in plant_info or not plant_info["data"]:
raise ConfigEntryError("No plants found for this account.")
plant_id = plant_info["data"][0]["plantId"]

# Get a list of devices for specified plant to add sensors for.
try:
devices = api.device_list(plant_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during device list: {ex}"
) from ex

return devices, plant_id

def get_device_list_v1(
api, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Device list logic for Open API V1.

Note: Plant selection (including auto-selection if only one plant exists)
is handled in the config flow before this function is called. This function
only fetches devices for the already-selected plant_id.
"""
plant_id = config[CONF_PLANT_ID]
try:
devices_dict = api.device_list(plant_id)
except growattServer.GrowattV1ApiError as e:
raise ConfigEntryError(
f"API error during device list: {e} (Code: {getattr(e, 'error_code', None)}, Message: {getattr(e, 'error_msg', None)})"
) from e
devices = devices_dict.get("devices", [])
# Only MIN device (type = 7) support implemented in current V1 API
supported_devices = [
{
"deviceSn": device.get("device_sn", ""),
"deviceType": "min",
}
for device in devices
if device.get("type") == 7
]

for device in devices:
if device.get("type") != 7:
_LOGGER.warning(
"Device %s with type %s not supported in Open API V1, skipping",
device.get("device_sn", ""),
device.get("type"),
)
return supported_devices, plant_id

def get_device_list(
api, config: Mapping[str, str], api_version: str
) -> tuple[list[dict[str, str]], str]:
"""Dispatch to correct device list logic based on API version."""
if api_version == "v1":
return get_device_list_v1(api, config)
if api_version == "classic":
return get_device_list_classic(api, config)
raise ConfigEntryError(f"Unknown API version: {api_version}")

async def async_setup_entry(
hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
"""Set up Growatt from a config entry."""

config = config_entry.data
username = config[CONF_USERNAME]
url = config.get(CONF_URL, DEFAULT_URL)

# If the URL has been deprecated then change to the default instead
@@ -136,24 +58,11 @@ async def async_setup_entry(
new_data[CONF_URL] = url
hass.config_entries.async_update_entry(config_entry, data=new_data)

# Determine API version
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
api_version = "v1"
token = config[CONF_TOKEN]
api = growattServer.OpenApiV1(token=token)
elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
api_version = "classic"
username = config[CONF_USERNAME]
api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=username
)
# Initialise the library with the username & a random id each time it is started
api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
api.server_url = url
else:
raise ConfigEntryError("Unknown authentication type in config entry.")

devices, plant_id = await hass.async_add_executor_job(
get_device_list, api, config, api_version
)
devices, plant_id = await hass.async_add_executor_job(get_device_list, api, config)

# Create a coordinator for the total sensors
total_coordinator = GrowattCoordinator(
@@ -166,7 +75,7 @@ async def async_setup_entry(
hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
)
for device in devices
if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
if device["deviceType"] in ["inverter", "tlx", "storage", "mix"]
}

# Perform the first refresh for the total coordinator

@@ -1,38 +1,22 @@
"""Config flow for growatt server integration."""

import logging
from typing import Any

import growattServer
import requests
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_TOKEN,
CONF_URL,
CONF_USERNAME,
)
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import callback

from .const import (
ABORT_NO_PLANTS,
AUTH_API_TOKEN,
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_URL,
DOMAIN,
ERROR_CANNOT_CONNECT,
ERROR_INVALID_AUTH,
LOGIN_INVALID_AUTH_CODE,
SERVER_URLS,
)

_LOGGER = logging.getLogger(__name__)

class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
"""Config flow class."""
@@ -43,98 +27,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):

def __init__(self) -> None:
"""Initialise growatt server flow."""
self.user_id: str | None = None
self.user_id = None
self.data: dict[str, Any] = {}
self.auth_type: str | None = None
self.plants: list[dict[str, Any]] = []

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
return self.async_show_menu(
step_id="user",
menu_options=["password_auth", "token_auth"],
)

async def async_step_password_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle username/password authentication."""
if user_input is None:
return self._async_show_password_form()

self.auth_type = AUTH_PASSWORD

# Traditional username/password authentication
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
)
self.api.server_url = user_input[CONF_URL]

try:
login_response = await self.hass.async_add_executor_job(
self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
)
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt API login: %s", ex)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
except (ValueError, KeyError, TypeError, AttributeError) as ex:
_LOGGER.error("Invalid response format during login: %s", ex)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})

if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
return self._async_show_password_form({"base": ERROR_INVALID_AUTH})

self.user_id = login_response["user"]["id"]
self.data = user_input
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()

async def async_step_token_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle API token authentication."""
if user_input is None:
return self._async_show_token_form()

self.auth_type = AUTH_API_TOKEN

# Using token authentication
token = user_input[CONF_TOKEN]
self.api = growattServer.OpenApiV1(token=token)

# Verify token by fetching plant list
try:
plant_response = await self.hass.async_add_executor_job(self.api.plant_list)
self.plants = plant_response.get("plants", [])
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt V1 API plant list: %s", ex)
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
except growattServer.GrowattV1ApiError as e:
_LOGGER.error(
"Growatt V1 API error: %s (Code: %s)",
e.error_msg or str(e),
getattr(e, "error_code", None),
)
return self._async_show_token_form({"base": ERROR_INVALID_AUTH})
except (ValueError, KeyError, TypeError, AttributeError) as ex:
_LOGGER.error(
"Invalid response format during Growatt V1 API plant list: %s", ex
)
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
self.data = user_input
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()

@callback
def _async_show_password_form(
self, errors: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show the username/password form to the user."""
def _async_show_user_form(self, errors=None):
"""Show the form to the user."""
data_schema = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
@@ -144,87 +42,58 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
)

return self.async_show_form(
step_id="password_auth", data_schema=data_schema, errors=errors
step_id="user", data_schema=data_schema, errors=errors
)

@callback
def _async_show_token_form(
self, errors: dict[str, Any] | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show the API token form to the user."""
data_schema = vol.Schema(
{
vol.Required(CONF_TOKEN): str,
}
"""Handle the start of the config flow."""
if not user_input:
return self._async_show_user_form()

# Initialise the library with the username & a random id each time it is started
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
)
self.api.server_url = user_input[CONF_URL]
login_response = await self.hass.async_add_executor_job(
self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
)

return self.async_show_form(
step_id="token_auth",
data_schema=data_schema,
errors=errors,
)
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
return self._async_show_user_form({"base": "invalid_auth"})
self.user_id = login_response["user"]["id"]

self.data = user_input
return await self.async_step_plant()

async def async_step_plant(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle adding a "plant" to Home Assistant."""
if self.auth_type == AUTH_API_TOKEN:
# Using V1 API with token
if not self.plants:
return self.async_abort(reason=ABORT_NO_PLANTS)

# Create dictionary of plant_id -> name
plant_dict = {
str(plant["plant_id"]): plant.get("name", "Unknown Plant")
for plant in self.plants
}

if user_input is None and len(plant_dict) > 1:
data_schema = vol.Schema(
{vol.Required(CONF_PLANT_ID): vol.In(plant_dict)}
)
return self.async_show_form(step_id="plant", data_schema=data_schema)

if user_input is None:
# Single plant => mark it as selected
user_input = {CONF_PLANT_ID: list(plant_dict.keys())[0]}

user_input[CONF_NAME] = plant_dict[user_input[CONF_PLANT_ID]]

else:
# Traditional API
try:
plant_info = await self.hass.async_add_executor_job(
self.api.plant_list, self.user_id
)
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt API plant list: %s", ex)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)

# Access plant_info["data"] - validate response structure
if not isinstance(plant_info, dict) or "data" not in plant_info:
_LOGGER.error(
"Invalid response format during plant list: missing 'data' key"
)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
if not plant_info["data"]:
return self.async_abort(reason="no_plants")

plant_data = plant_info["data"]
plants = {plant["plantId"]: plant["plantName"] for plant in plant_info["data"]}

if not plant_data:
return self.async_abort(reason=ABORT_NO_PLANTS)

plants = {plant["plantId"]: plant["plantName"] for plant in plant_data}

if user_input is None and len(plant_data) > 1:
if user_input is None and len(plant_info["data"]) > 1:
data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})

return self.async_show_form(step_id="plant", data_schema=data_schema)

if user_input is None:
# single plant => mark it as selected
user_input = {CONF_PLANT_ID: plant_data[0]["plantId"]}
user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]}

user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]

await self.async_set_unique_id(user_input[CONF_PLANT_ID])
self._abort_if_unique_id_configured()
self.data.update(user_input)

@@ -4,16 +4,6 @@ from homeassistant.const import Platform

CONF_PLANT_ID = "plant_id"

# API key support
CONF_API_KEY = "api_key"

# Auth types for config flow
AUTH_PASSWORD = "password"
AUTH_API_TOKEN = "api_token"
CONF_AUTH_TYPE = "auth_type"
DEFAULT_AUTH_TYPE = AUTH_PASSWORD

DEFAULT_PLANT_ID = "0"

DEFAULT_NAME = "Growatt"
@@ -39,10 +29,3 @@ DOMAIN = "growatt_server"
PLATFORMS = [Platform.SENSOR]

LOGIN_INVALID_AUTH_CODE = "502"

# Config flow error types (also used as abort reasons)
ERROR_CANNOT_CONNECT = "cannot_connect" # Used for both form errors and aborts
ERROR_INVALID_AUTH = "invalid_auth"

# Config flow abort reasons
ABORT_NO_PLANTS = "no_plants"

@@ -1,7 +1,5 @@
"""Coordinator module for managing Growatt data fetching."""

from __future__ import annotations

import datetime
import json
import logging
@@ -40,30 +38,22 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
plant_id: str,
) -> None:
"""Initialize the coordinator."""
self.api_version = (
"v1" if config_entry.data.get("auth_type") == "api_token" else "classic"
)
self.device_id = device_id
self.device_type = device_type
self.plant_id = plant_id
self.previous_values: dict[str, Any] = {}

if self.api_version == "v1":
self.username = None
self.password = None
self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
self.token = config_entry.data["token"]
self.api = growattServer.OpenApiV1(token=self.token)
elif self.api_version == "classic":
self.username = config_entry.data.get(CONF_USERNAME)
self.username = config_entry.data[CONF_USERNAME]
self.password = config_entry.data[CONF_PASSWORD]
self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=self.username
)

# Set server URL
self.api.server_url = self.url
else:
raise ValueError(f"Unknown API version: {self.api_version}")

self.device_id = device_id
self.device_type = device_type
self.plant_id = plant_id

# Initialize previous_values to store historical data
self.previous_values: dict[str, Any] = {}

super().__init__(
hass,
@@ -77,54 +67,21 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Update data via library synchronously."""
_LOGGER.debug("Updating data for %s (%s)", self.device_id, self.device_type)

# login only required for classic API
if self.api_version == "classic":
# Login in to the Growatt server
self.api.login(self.username, self.password)

if self.device_type == "total":
if self.api_version == "v1":
# The V1 Plant APIs do not provide the same information as the classic plant_info() API
# More specifically:
# 1. There is no monetary information to be found, so today and lifetime money is not available
# 2. There is no nominal power, this is provided by inverter min_energy()
# This means, for the total coordinator we can only fetch and map the following:
# todayEnergy -> today_energy
# totalEnergy -> total_energy
# invTodayPpv -> current_power
total_info = self.api.plant_energy_overview(self.plant_id)
total_info["todayEnergy"] = total_info["today_energy"]
total_info["totalEnergy"] = total_info["total_energy"]
total_info["invTodayPpv"] = total_info["current_power"]
else:
# Classic API: use plant_info as before
total_info = self.api.plant_info(self.device_id)
del total_info["deviceList"]
plant_money_text, currency = total_info["plantMoneyText"].split("/")
total_info["plantMoneyText"] = plant_money_text
total_info["currency"] = currency
_LOGGER.debug("Total info for plant %s: %r", self.plant_id, total_info)
self.data = total_info
elif self.device_type == "inverter":
self.data = self.api.inverter_detail(self.device_id)
elif self.device_type == "min":
# Open API V1: min device
try:
min_details = self.api.min_detail(self.device_id)
min_settings = self.api.min_settings(self.device_id)
min_energy = self.api.min_energy(self.device_id)
except growattServer.GrowattV1ApiError as err:
_LOGGER.error(
"Error fetching min device data for %s: %s", self.device_id, err
)
raise UpdateFailed(f"Error fetching min device data: {err}") from err

min_info = {**min_details, **min_settings, **min_energy}
self.data = min_info
_LOGGER.debug("min_info for device %s: %r", self.device_id, min_info)
elif self.device_type == "tlx":
tlx_info = self.api.tlx_detail(self.device_id)
self.data = tlx_info["data"]
_LOGGER.debug("tlx_info for device %s: %r", self.device_id, tlx_info)
elif self.device_type == "storage":
storage_info_detail = self.api.storage_params(self.device_id)
storage_energy_overview = self.api.storage_energy_overview(
@@ -188,7 +145,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return self.data.get("currency")

def get_data(
self, entity_description: GrowattSensorEntityDescription
self, entity_description: "GrowattSensorEntityDescription"
) -> str | int | float | None:
"""Get the data."""
variable = entity_description.api_key

@@ -51,7 +51,7 @@ async def async_setup_entry(
sensor_descriptions: list = []
if device_coordinator.device_type == "inverter":
sensor_descriptions = list(INVERTER_SENSOR_TYPES)
elif device_coordinator.device_type in ("tlx", "min"):
elif device_coordinator.device_type == "tlx":
sensor_descriptions = list(TLX_SENSOR_TYPES)
elif device_coordinator.device_type == "storage":
sensor_descriptions = list(STORAGE_SENSOR_TYPES)

@@ -2,42 +2,26 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_plants": "No plants have been found on this account"
},
"error": {
"invalid_auth": "Authentication failed. Please check your credentials and try again.",
"cannot_connect": "Cannot connect to Growatt servers. Please check your internet connection and try again."
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"step": {
"user": {
"title": "Choose authentication method",
"description": "Note: API Token authentication is currently only supported for MIN/TLX devices. For other device types, please use Username & Password authentication.",
"menu_options": {
"password_auth": "Username & Password",
"token_auth": "API Token (MIN/TLX only)"
}
},
"password_auth": {
"title": "Enter your Growatt login credentials",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"url": "[%key:common::config_flow::data::url%]"
}
},
"token_auth": {
"title": "Enter your API token",
"description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
"data": {
"token": "API Token"
}
},
"plant": {
"data": {
"plant_id": "Plant"
},
"title": "Select your plant"
},
"user": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]",
"url": "[%key:common::config_flow::data::url%]"
},
"title": "Enter your Growatt information"
}
}
},

@@ -174,9 +174,6 @@
},
"collected_items": {
"default": "mdi:sack"
},
"last_checkin": {
"default": "mdi:login-variant"
}
},
"switch": {

@@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from enum import StrEnum
import logging
from typing import Any
@@ -54,7 +53,7 @@ PARALLEL_UPDATES = 1
class HabiticaSensorEntityDescription(SensorEntityDescription):
"""Habitica Sensor Description."""

value_fn: Callable[[UserData, ContentData], StateType | datetime]
value_fn: Callable[[UserData, ContentData], StateType]
attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
None
)
@@ -115,7 +114,6 @@ class HabiticaSensorEntity(StrEnum):
COLLECTED_ITEMS = "collected_items"
BOSS_RAGE = "boss_rage"
BOSS_RAGE_LIMIT = "boss_rage_limit"
LAST_CHECKIN = "last_checkin"

SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
@@ -286,16 +284,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
translation_key=HabiticaSensorEntity.PENDING_QUEST_ITEMS,
value_fn=pending_quest_items,
),
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.LAST_CHECKIN,
translation_key=HabiticaSensorEntity.LAST_CHECKIN,
value_fn=(
lambda user, _: dt_util.as_local(last)
if (last := user.auth.timestamps.loggedin)
else None
),
device_class=SensorDeviceClass.TIMESTAMP,
),
)

@@ -411,7 +399,7 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
entity_description: HabiticaSensorEntityDescription

@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the device."""

return self.entity_description.value_fn(
@@ -454,7 +442,7 @@ class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
entity_description: HabiticaPartySensorEntityDescription

@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the device."""

return self.entity_description.value_fn(

@@ -290,9 +290,6 @@
}
}
},
"last_checkin": {
"name": "Last check-in"
},
"health": {
"name": "Health",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.3.3"],
"requirements": ["aiohasupervisor==0.3.3b0"],
"single_config_entry": true
}

@@ -73,6 +73,7 @@ class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
try:
await supervisor_client.addons.start_addon(self._addon_slug)
except SupervisorError as err:
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
raise HomeAssistantError(err) from err

await self.coordinator.force_addon_info_data_refresh(self._addon_slug)

@@ -10,7 +10,6 @@ from homeassistant.components.homeassistant_hardware import firmware_config_flow
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.config_entries import (
ConfigEntry,
@@ -67,7 +66,6 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""

context: ConfigFlowContext
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]

async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None

@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
@@ -157,7 +156,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Connect ZBT-2 firmware update entity."""

bootloader_reset_methods = [ResetTarget.RTS_DTR]
bootloader_reset_type = None

def __init__(
self,

@@ -39,7 +39,6 @@ from .util import (
FirmwareInfo,
OwningAddon,
OwningIntegration,
ResetTarget,
async_flash_silabs_firmware,
get_otbr_addon_manager,
guess_firmware_info,
@@ -80,8 +79,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Base flow to install firmware."""

ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
BOOTLOADER_RESET_METHODS: list[ResetTarget] = [] # Default, subclasses may override

_picked_firmware_type: PickedFirmwareType
_zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED

@@ -277,7 +274,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
device=self._device,
fw_data=fw_data,
expected_installed_firmware_type=expected_installed_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
bootloader_reset_type=None,
progress_callback=lambda offset, total: self.async_update_progress(
offset / total
),

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.0.35",
"universal-silabs-flasher==0.0.32",
"ha-silabs-firmware-client==0.2.0"
]
}

@@ -22,12 +22,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import FirmwareUpdateCoordinator
from .helpers import async_register_firmware_info_callback
from .util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
async_flash_silabs_firmware,
)
from .util import ApplicationType, FirmwareInfo, async_flash_silabs_firmware

_LOGGER = logging.getLogger(__name__)

@@ -86,7 +81,7 @@ class BaseFirmwareUpdateEntity(

# Subclasses provide the mapping between firmware types and entity descriptions
entity_description: FirmwareUpdateEntityDescription
bootloader_reset_methods: list[ResetTarget] = []
bootloader_reset_type: str | None = None

_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
@@ -273,7 +268,7 @@ class BaseFirmwareUpdateEntity(
device=self._current_device,
fw_data=fw_data,
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.bootloader_reset_methods,
bootloader_reset_type=self.bootloader_reset_type,
progress_callback=self._update_progress,
)
finally:

@@ -4,16 +4,13 @@ from __future__ import annotations

import asyncio
from collections import defaultdict
from collections.abc import AsyncIterator, Callable, Iterable, Sequence
from collections.abc import AsyncIterator, Callable, Iterable
from contextlib import AsyncExitStack, asynccontextmanager
from dataclasses import dataclass
from enum import StrEnum
import logging

from universal_silabs_flasher.const import (
ApplicationType as FlasherApplicationType,
ResetTarget as FlasherResetTarget,
)
from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType
from universal_silabs_flasher.firmware import parse_firmware_image
from universal_silabs_flasher.flasher import Flasher

@@ -62,18 +59,6 @@ class ApplicationType(StrEnum):
return FlasherApplicationType(self.value)

class ResetTarget(StrEnum):
"""Methods to reset a device into bootloader mode."""

RTS_DTR = "rts_dtr"
BAUDRATE = "baudrate"
YELLOW = "yellow"

def as_flasher_reset_target(self) -> FlasherResetTarget:
"""Convert the reset target enum into one compatible with USF."""
return FlasherResetTarget(self.value)

@singleton(OTBR_ADDON_MANAGER_DATA)
@callback
def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager:
@@ -357,7 +342,7 @@ async def async_flash_silabs_firmware(
device: str,
fw_data: bytes,
expected_installed_firmware_type: ApplicationType,
bootloader_reset_methods: Sequence[ResetTarget] = (),
bootloader_reset_type: str | None = None,
progress_callback: Callable[[int, int], None] | None = None,
) -> FirmwareInfo:
"""Flash firmware to the SiLabs device."""
@@ -374,9 +359,7 @@ async def async_flash_silabs_firmware(
ApplicationType.SPINEL.as_flasher_application_type(),
ApplicationType.CPC.as_flasher_application_type(),
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
bootloader_reset=bootloader_reset_type,
)

async with AsyncExitStack() as stack:
Some files were not shown because too many files have changed in this diff